R Language: Experiment 4 - Artificial Neural Networks

    1. Implementation with a package (nnet):

    rm(list=ls())
    setwd("C:/Users/Administrator/Desktop/R语言与数据挖掘作业/实验4-人工神经网络")
    
    #read the predictors and the class label (columns 2-5) from the CSV file
    Data=read.csv("sales_data.csv")[,2:5]
    
    library(nnet)
    colnames(Data)<-c("x1","x2","x3","y")
    #nnet needs a factor response for classification; on R >= 4.0 read.csv
    #no longer converts strings to factors automatically
    Data$y=as.factor(Data$y)
    
    #single hidden layer with 6 units, weight decay 5e-4, at most 1000 iterations
    model1=nnet(y~.,data=Data,size=6,decay=5e-4,maxit=1000)
    
    #predict class labels for the training rows and compute the overall accuracy
    pred=predict(model1,Data[,1:3],type="class")
    (p=sum(as.numeric(pred==Data$y))/nrow(Data))
    
    #confusion matrix and its row-wise (per-class) proportions
    table(Data$y,pred)
    
    prop.table(table(Data$y,pred),1)
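
    The block above scores the model on the same rows it was trained on, so the accuracy is optimistic. A minimal held-out sketch reusing the same nnet settings (the 70/30 split fraction and the object names idx, train, test, model2 are my own choices, not part of the original experiment):

    #sketch: hold out roughly 30% of the rows for testing (assumes Data and nnet are loaded as above)
    set.seed(1)
    idx=sample(nrow(Data),size=round(0.7*nrow(Data)))
    train=Data[idx,]
    test=Data[-idx,]
    model2=nnet(y~.,data=train,size=6,decay=5e-4,maxit=1000)
    pred2=predict(model2,test[,1:3],type="class")
    (p2=sum(as.numeric(pred2==test$y))/nrow(test))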

    2. Understand the BP (back-propagation) neural network algorithm in depth and implement it in R

    Hand-coding a simple neural network:
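
    For reference, the update rules implemented below are the standard back-propagation equations for a sigmoid network trained on squared error, with learning rate $\eta$, target $t$, output $o$, and hidden activation $h$:

    $$\delta_o = o(1-o)(t-o), \qquad \delta_h = h(1-h)\sum_k w_{hk}\,\delta_{o,k}, \qquad w \leftarrow w + \eta \cdot x \cdot \delta$$

    where $x$ is the activation feeding the weight being updated.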

    rm(list=ls())
    #install.packages("sampling")
    library(sampling) 
    setwd("C:/Users/Administrator/Desktop/R语言与数据挖掘作业/实验4-人工神经网络")
    
    data("iris")
    #as.numeric(data[,5])
    #stratified sampling: draw 3/5 of the rows of each Species, without replacement
    n=round(3/5*nrow(iris)/3)
    
    sub_train=strata(iris,stratanames=("Species"),size=rep(n,3),method="srswor")
    
    head(sub_train)
    
    data_train=iris[sub_train$ID_unit,]
    data_test=iris[-sub_train$ID_unit,]
    dim(data_train)
    dim(data_test)
    
    #save the train/test splits as CSV files
    write.csv(data_train,"./iris_data_train.csv")
    write.csv(data_test,"./iris_data_test.csv")
    
    #min-max normalize each predictor column of data_train to [0,1]
    #colnames()
    data_train$Sepal.Length=(data_train$Sepal.Length-min(data_train$Sepal.Length))*1.0/
      (max(data_train$Sepal.Length)-min(data_train$Sepal.Length))
    
    data_train$Sepal.Width=(data_train$Sepal.Width-min(data_train$Sepal.Width))*1.0/
      (max(data_train$Sepal.Width)-min(data_train$Sepal.Width))
    
    data_train$Petal.Length=(data_train$Petal.Length-min(data_train$Petal.Length))*1.0/
      (max(data_train$Petal.Length)-min(data_train$Petal.Length))
    
    data_train$Petal.Width=(data_train$Petal.Width-min(data_train$Petal.Width))*1.0/
      (max(data_train$Petal.Width)-min(data_train$Petal.Width))
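
    The four statements above all apply the same min-max formula column by column; they could be collapsed into a small helper. A minimal sketch (the helper name normalize01 is my own):

    #sketch: min-max scale each of the four predictor columns to [0,1]
    normalize01<-function(x) (x-min(x))/(max(x)-min(x))
    data_train[,1:4]=lapply(data_train[,1:4],normalize01)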
    
    #architecture: 4 inputs, 5 hidden units, 3 output units
    #the first layer of connections needs 4*5 weights (w1) and 5 biases (a1);
    #the second needs 5*3 weights (w2) and 3 biases (a2)
    
    #logistic sigmoid activation function
    f<-function(x)
    {
      return(1/(1+exp(-x)))
    }
    
    #number of passes over the training set (epochs)
    global_time=100
    
    #learning rate
    learning_rate=1.8
    
    #randomly initialize the weights and biases of both layers with values in [0,1]
    w1=matrix(sample((0:100)*1.0/100,size=20),4,5)
    a1=matrix(sample((0:100)*1.0/100,size=5),1,5)
    
    w2=matrix(sample((0:100)*1.0/100,size=15),5,3)
    a2=matrix(sample((0:100)*1.0/100,size=3),1,3)
    
    
    #loop over every training sample; update the parameters after each sample (online training)
    for(time in 1:global_time)
    {
      for(i in 1:length(data_train$Species))
      {
        #the i-th sample as a 1x4 input row
        x1=matrix(c(data_train$Sepal.Length[i],data_train$Sepal.Width[i],data_train$Petal.Length[i],data_train$Petal.Width[i]),1,4)
        
        #matrix multiplication %*%
        #hidden-layer output (1x5), which is also the input of the output layer
        x2=f(x1%*%w1-a1)
        
        #output-layer activations, a 1x3 matrix
        x3=f(x2%*%w2-a2)
        
        #encode the label as a 1x3 target vector: 0.9 for the true class, 0.1 elsewhere
        y=matrix(0.1,1,3)
        if(data_train$Species[i]=="setosa" ){y[1]=0.9}
        if(data_train$Species[i]=="versicolor"){y[2]=0.9}
        if(data_train$Species[i]=="virginica"){y[3]=0.9}  
        #compare with the target; the output-layer error term is output*(1-output)*(y-output)
        #print(y)
        #print(data_train$Species[i])
        cha=x3*(1-x3)*(y-x3)
        #cat("loss",mean(cha))
        #print("")
        #update w2 between the hidden and the output layer: w2 = w2 + learning_rate * t(x2) %*% cha
        #t(x2) (5x1) times cha (1x3) gives the 5x3 increment that is added to w2
        tx2=t(x2)
        dw2=learning_rate * (tx2 %*% cha)
        #keep the pre-update w2; the hidden-layer error term must be computed with the old weights
        before_w2=w2
        w2=w2+dw2
        
        #update w1 between the input and the hidden layer: w1 = w1 + learning_rate * t(x1) %*% t(cha2)
        #the hidden-layer error term is not observed directly; it is back-propagated from the
        #output-layer error: cha2 (5x1) = x2*(1-x2) * (w2 %*% t(cha)), using the pre-update w2
        cha2 = t(x2*(1-x2)) * (before_w2 %*% t(cha))
        tx1=t(x1)
        dw1=learning_rate * (tx1 %*% t(cha2))
        w1=w1+dw1
        
      }
    }
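
    The forward pass is repeated verbatim in the two evaluation loops below; it can be factored into a small helper, and the predicted class can also be read off as the output unit with the largest activation. A minimal sketch (the helper names forward_bp and predict_species are my own; the loops below instead pick the 0.9/0.1 target with the smallest total absolute difference, which normally gives the same answer):

    #sketch: forward pass through the trained 4-5-3 network using the global w1, a1, w2, a2
    forward_bp<-function(x1)
    {
      x2=f(x1%*%w1-a1)          #1x5 hidden activations
      return(f(x2%*%w2-a2))     #1x3 output activations
    }
    predict_species<-function(x3) c("setosa","versicolor","virginica")[which.max(x3)]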
    
    #check how well the network fits the training set
    SUM=length(data_train$Species)
    right=0
    for(i in 1:length(data_train$Species))
    {
      #the i-th sample as a 1x4 input row
      x1=matrix(c(data_train$Sepal.Length[i],data_train$Sepal.Width[i],data_train$Petal.Length[i],data_train$Petal.Width[i]),1,4)
      
      #matrix multiplication %*%
      #hidden-layer output, which is also the input of the output layer
      x2=f(x1%*%w1-a1)
      
      #output-layer activations, a 1x3 matrix
      x3=f(x2%*%w2-a2)
      print(x3)
      
      #target codings of the three classes
      y1=matrix(c(0.9,0.1,0.1),1,3)
      y2=matrix(c(0.1,0.9,0.1),1,3)
      y3=matrix(c(0.1,0.1,0.9),1,3)
      
      # cha11=x3*(1-x3)*(y1-x3)
      # cha22=x3*(1-x3)*(y2-x3)
      # cha33=x3*(1-x3)*(y3-x3)
      #differences between the network output and each target coding
      cha11=(y1-x3)
      cha22=(y2-x3)
      cha33=(y3-x3)
      
      cha1=0
      cha2=0
      cha3=0
      
      for(j in 1:3)
      {
        cha1=cha1+abs(cha11[j])
        cha2=cha2+abs(cha22[j])
        cha3=cha3+abs(cha33[j])
      }
    
      micha=min(cha1,cha2,cha3)
      #cat("micha",micha,"
    ")
      #cat("cha1",cha1,"
    ")
      #cat("cha2",cha2,"
    ")
      #cat("cha3",cha3,"
    ")
      
      
      if(micha==cha1 & data_train$Species[i]=="setosa") {print(1)
        right=right+1}
      if(micha==cha2 & data_train$Species[i]=="versicolor"){print(2)
        right=right+1}
      if(micha==cha3 & data_train$Species[i]=="virginica") {print(3)
        right=right+1}
    }
    
    print("拟合度为:")
    print((right*1.0/SUM))
    print("sum")
    print(SUM)
    print("right")
    print(right)
    
    #training finished; inspect the learned weights
    print("w1")
    print(w1)
    print("w2")
    print(w2)
    
    #min-max normalize the test predictors (here with the test set's own column ranges)
    data_test$Sepal.Length=(data_test$Sepal.Length-min(data_test$Sepal.Length))*1.0/
      (max(data_test$Sepal.Length)-min(data_test$Sepal.Length))
    
    data_test$Sepal.Width=(data_test$Sepal.Width-min(data_test$Sepal.Width))*1.0/
      (max(data_test$Sepal.Width)-min(data_test$Sepal.Width))
    
    data_test$Petal.Length=(data_test$Petal.Length-min(data_test$Petal.Length))*1.0/
      (max(data_test$Petal.Length)-min(data_test$Petal.Length))
    
    data_test$Petal.Width=(data_test$Petal.Width-min(data_test$Petal.Width))*1.0/
      (max(data_test$Petal.Width)-min(data_test$Petal.Width))
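
    Note that the block above rescales the test set with its own per-column minima and maxima, so train and test end up on slightly different scales. A minimal sketch of reusing the training ranges instead (the objects train_rng and scale_with are my own; the workspace copy of iris still holds the raw values, so the training ranges can be recomputed from it):

    #sketch: rescale the test predictors with the ranges of the raw training rows
    train_rng=lapply(iris[sub_train$ID_unit,1:4],range)
    scale_with<-function(x,r) (x-r[1])/(r[2]-r[1])
    for(col in names(train_rng))
    {
      data_test[[col]]=scale_with(data_test[[col]],train_rng[[col]])
    }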
    
    #measure the accuracy on the test data
    
    SUM=length(data_test$Species)
    right=0
    
    for(i in 1:length(data_test$Species))
    {
      #the i-th test sample as a 1x4 input row
      x1=matrix(c(data_test$Sepal.Length[i],data_test$Sepal.Width[i],data_test$Petal.Length[i],data_test$Petal.Width[i]),1,4)
      
      #matrix multiplication %*%
      #hidden-layer output, which is also the input of the output layer
      x2=f(x1%*%w1-a1)
      
      #output-layer activations, a 1x3 matrix
      x3=f(x2%*%w2-a2)
      
      cha11=(y1-x3)
      cha22=(y2-x3)
      cha33=(y3-x3)
      
      cha1=0
      cha2=0
      cha3=0
      
      for(j in 1:3)
      {
        cha1=cha1+abs(cha11[j])
        cha2=cha2+abs(cha22[j])
        cha3=cha3+abs(cha33[j])
      }
      
      micha=min(cha1,cha2,cha3)
      #cat("micha",micha,"
    ")
      #cat("cha1",cha1,"
    ")
      #cat("cha2",cha2,"
    ")
      #cat("cha3",cha3,"
    ")
      
      
      if(micha==cha1 & data_test$Species[i]=="setosa") {print(1)
        right=right+1}
      if(micha==cha2 & data_test$Species[i]=="versicolor"){print(2)
        right=right+1}
      if(micha==cha3 & data_test$Species[i]=="virginica") {print(3)
        right=right+1}
    }
    
    print("accuracy:")
    print((right*1.0/SUM))
    cat("right",right)
    print("")
    cat("SUM",SUM)

    3. BP neural network on iris with the nnet package

     

    rm(list=ls())
    #install.packages("sampling")
    library(nnet) 
    library(sampling)
    
    setwd("C:/Users/Administrator/Desktop/R???????????ฺพ???าต/สต??4-?หน?????????")
    
    data("iris")
    
    #min-max normalize all four iris predictors to [0,1] before splitting
    iris$Sepal.Length=(iris$Sepal.Length-min(iris$Sepal.Length))*1.0/
      (max(iris$Sepal.Length)-min(iris$Sepal.Length))
    
    iris$Sepal.Width=(iris$Sepal.Width-min(iris$Sepal.Width))*1.0/
      (max(iris$Sepal.Width)-min(iris$Sepal.Width))
    
    iris$Petal.Length=(iris$Petal.Length-min(iris$Petal.Length))*1.0/
      (max(iris$Petal.Length)-min(iris$Petal.Length))
    
    iris$Petal.Width=(iris$Petal.Width-min(iris$Petal.Width))*1.0/
      (max(iris$Petal.Width)-min(iris$Petal.Width))
    
    
    #stratified sample: 3/5 of the rows of each Species for training
    n=round(3/5*nrow(iris)/3)
    
    sub_train=strata(iris,stratanames=("Species"),size=rep(n,3),method="srswor")
    head(sub_train)
    colnames(iris)<-c("x1","x2","x3","x4","y")
    
    data_train=iris[sub_train$ID_unit,]
    data_test=iris[-sub_train$ID_unit,]
    dim(data_train)
    dim(data_test)
    
    model1=nnet(y~.,data=data_train,size=6,decay=5e-5,maxit=1000)
    pred=predict(model1,data_test[,1:4],type="class")
    P=sum(as.numeric(pred==data_test$y))/nrow(data_test)
    cat("accuracy",P*100,"%
    ")
    table(data_test$y,pred)
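
    As in the first block, the per-class proportions can be read off the confusion matrix:

    prop.table(table(data_test$y,pred),1)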