R语言神经网络与深度学习(一)
# R Neural Networks and Deep Learning (Part 1)
# Plot the ReLU activation function
x <- seq(-1, 1, 0.1)           # x grid: arithmetic sequence from -1 to 1 in steps of 0.1
relu <- function(x) pmax(x, 0) # ReLU(x) = max(x, 0); pmax is the vectorized idiom (same result as ifelse(x > 0, x, 0))
plot(x, relu(x), type = "l")   # draw the function as a line
text(0.6, 0.4, "ReLU(x)")      # add a text label to the plot
# Perceptron / feed-forward network model
# Install the neuralnet package only if it is missing; an unconditional
# install.packages() call in a script re-downloads on every run.
if (!requireNamespace("neuralnet", quietly = TRUE)) {
  install.packages("neuralnet")
}
library(neuralnet)
# A nonlinear regression example: teach the network y = log(x)
set.seed(1) # fix the RNG so the random training set (and fit) is reproducible
traininginput <- as.data.frame(runif(50, min = 1, max = 100)) # 50 uniform random inputs in [1, 100]
trainingoutput <- log(traininginput)                          # targets: natural log of the inputs
traindata <- cbind(traininginput, trainingoutput)             # column-bind input and output into the training set
colnames(traindata) <- c("input", "output")                   # name the training-data columns
# Network: input layer + one hidden layer of 10 neurons + output layer.
# threshold = 0.01 stops training when the error-function partial
# derivatives fall below 0.01.
net.log <- neuralnet(output ~ input, traindata, hidden = 10, threshold = 0.01)
print(net.log) # rich output: per-layer weights, fit error, steps, etc.
$call
neuralnet(formula = output ~ input, data = traindata, hidden = 10,
threshold = 0.01)
$response
output
1 3.7636802
2 4.4644749
3 1.7436266
4 4.4739950
5 4.5343469
6 4.3922093
7 3.1241007
8 4.1268159
9 4.5423753
10 4.0774836
11 4.0416826
12 4.5609231
13 1.9013686
14 4.0447385
15 3.9645057
16 4.2647878
17 4.1592951
18 4.0911714
19 4.1509276
20 4.0576335
21 4.3243332
22 3.9223834
23 0.2055446
24 3.7957673
25 4.5556668
26 4.0531594
27 3.7693452
28 4.0053732
29 2.7642808
30 3.8549792
31 2.9460532
32 2.6809099
33 2.9693048
34 3.9097673
35 4.3624009
36 3.8901353
37 3.7667034
38 2.9365815
39 4.6030032
40 4.5424663
41 4.6006236
42 4.4700970
43 4.2985708
44 4.6039118
45 2.4092103
46 3.8176258
47 2.1870567
48 4.4869113
49 4.3311433
50 4.4543536
$covariate
[1,] 43.106777
[2,] 86.875399
[3,] 5.718043
[4,] 87.706411
[5,] 93.162653
[6,] 80.818776
[7,] 22.739436
[8,] 61.980258
[9,] 93.913611
[10,] 58.996821
[11,] 56.922039
[12,] 95.671757
[13,] 6.695051
[14,] 57.096255
[15,] 52.694217
[16,] 71.149822
[17,] 64.026377
[18,] 59.809912
[19,] 63.492872
[20,] 57.837277
[21,] 75.515146
[22,] 50.520712
[23,] 1.228194
[24,] 44.512379
[25,] 95.170192
[26,] 57.579083
[27,] 43.351669
[28,] 54.892306
[29,] 15.867625
[30,] 47.227636
[31,] 19.030695
[32,] 14.598370
[33,] 19.478373
[34,] 49.887342
[35,] 78.445248
[36,] 48.917503
[37,] 43.237292
[38,] 18.851293
[39,] 99.783536
[40,] 93.922153
[41,] 99.546369
[42,] 87.365201
[43,] 73.594536
[44,] 99.874243
[45,] 11.125173
[46,] 45.496064
[47,] 8.908953
[48,] 88.846600
[49,] 76.031160
[50,] 86.000542
$model.list
$model.list$response
[1] "output"
$model.list$variables
[1] "input"
$err.fct
function (x, y)
{
1/2 * (y - x)^2
}
<bytecode: 0x0000000012dc12e0>
<environment: 0x00000000210a1c48>
attr(,"type")
[1] "sse"
$act.fct
function (x)
{
1/(1 + exp(-x))
}
<bytecode: 0x0000000012dc5ee8>
<environment: 0x00000000210a20e0>
attr(,"type")
[1] "logistic"
$linear.output
[1] TRUE
$data
input output
1 43.106777 3.7636802
2 86.875399 4.4644749
3 5.718043 1.7436266
4 87.706411 4.4739950
5 93.162653 4.5343469
6 80.818776 4.3922093
7 22.739436 3.1241007
8 61.980258 4.1268159
9 93.913611 4.5423753
10 58.996821 4.0774836
11 56.922039 4.0416826
12 95.671757 4.5609231
13 6.695051 1.9013686
14 57.096255 4.0447385
15 52.694217 3.9645057
16 71.149822 4.2647878
17 64.026377 4.1592951
18 59.809912 4.0911714
19 63.492872 4.1509276
20 57.837277 4.0576335
21 75.515146 4.3243332
22 50.520712 3.9223834
23 1.228194 0.2055446
24 44.512379 3.7957673
25 95.170192 4.5556668
26 57.579083 4.0531594
27 43.351669 3.7693452
28 54.892306 4.0053732
29 15.867625 2.7642808
30 47.227636 3.8549792
31 19.030695 2.9460532
32 14.598370 2.6809099
33 19.478373 2.9693048
34 49.887342 3.9097673
35 78.445248 4.3624009
36 48.917503 3.8901353
37 43.237292 3.7667034
38 18.851293 2.9365815
39 99.783536 4.6030032
40 93.922153 4.5424663
41 99.546369 4.6006236
42 87.365201 4.4700970
43 73.594536 4.2985708
44 99.874243 4.6039118
45 11.125173 2.4092103
46 45.496064 3.8176258
47 8.908953 2.1870567
48 88.846600 4.4869113
49 76.031160 4.3311433
50 86.000542 4.4543536
$exclude
NULL
$net.result
$net.result[[1]]
[,1]
[1,] 3.7611324
[2,] 4.4676382
[3,] 1.7450071
[4,] 4.4767355
[5,] 4.5333355
[6,] 4.3973251
[7,] 3.1264467
[8,] 4.1283555
[9,] 4.5407127
[10,] 4.0778398
[11,] 4.0412524
[12,] 4.5576071
[13,] 1.9011417
[14,] 4.0443723
[15,] 3.9627180
[16,] 4.2694251
[17,] 4.1616376
[18,] 4.0918471
[19,] 4.1530636
[20,] 4.0575438
[21,] 4.3296645
[22,] 3.9200950
[23,] 0.2058114
[24,] 3.7930785
[25,] 4.5528409
[26,] 4.0529725
[27,] 3.7667663
[28,] 4.0042391
[29,] 2.7649095
[30,] 3.8522756
[31,] 2.9476404
[32,] 2.6810941
[33,] 2.9710108
[34,] 3.9073649
[35,] 4.3677722
[36,] 3.8875888
[37,] 3.7641387
[38,] 2.9381196
[39,] 4.5951146
[40,] 4.5407961
[41,] 4.5930255
[42,] 4.4730158
[43,] 4.3036824
[44,] 4.5959112
[45,] 2.4079249
[46,] 3.8148925
[47,] 2.1852585
[48,] 4.4890086
[49,] 4.3365084
[50,] 4.4579205
$weights
$weights[[1]]
$weights[[1]][[1]]
[,1] [,2] [,3] [,4] [,5]
[1,] 0.44219842 3.240757 3.202168 -0.7877108 0.40986679
[2,] 0.03712114 3.187768 1.834951 0.1106201 -0.02969351
[,6] [,7] [,8] [,9] [,10]
[1,] -0.63775130 0.3209541 1.088130 -1.7936935 2.897821
[2,] 0.02159582 -0.2508112 -1.443933 0.7005957 3.665216
$weights[[1]][[2]]
[,1]
[1,] 0.97762799
[2,] 1.18062819
[3,] -0.66380527
[4,] -0.01894856
[5,] 1.22171422
[6,] -0.94174505
[7,] 1.95836189
[8,] -1.71540135
[9,] -1.58353932
[10,] 0.49503450
[11,] -0.11677912
$generalized.weights
$generalized.weights[[1]]
[,1]
[1,] -0.0022210615
[2,] -0.0007116606
[3,] -0.1330105041
[4,] -0.0006983659
[5,] -0.0006173567
[6,] -0.0008171924
[7,] -0.0066323721
[8,] -0.0012800595
[9,] -0.0006070058
[10,] -0.0013815810
[11,] -0.0014591160
[12,] -0.0005834740
[13,] -0.0863911855
[14,] -0.0014523614
[15,] -0.0016390983
[16,] -0.0010236995
[17,] -0.0012162685
[18,] -0.0013528213
[19,] -0.0012324843
[20,] -0.0014241447
[21,] -0.0009238604
[22,] -0.0017459440
[23,] 4.5417733837
[24,] -0.0021141160
[25,] -0.0005900886
[26,] -0.0014338833
[27,] -0.0022017184
[28,] -0.0015414536
[29,] -0.0129830198
[30,] -0.0019321744
[31,] -0.0092002434
[32,] -0.0152801351
[33,] -0.0088113650
[34,] -0.0017792936
[35,] -0.0008631226
[36,] -0.0018325209
[37,] -0.0022107137
[38,] -0.0093639779
[39,] -0.0005320635
[40,] -0.0006068891
[41,] -0.0005348975
[42,] -0.0007037923
[43,] -0.0009663184
[44,] -0.0005309836
[45,] -0.0266392482
[46,] -0.0020447419
[47,] -0.0432961417
[48,] -0.0006805536
[49,] -0.0009128212
[50,] -0.0007259494
$startweights
$startweights[[1]]
$startweights[[1]][[1]]
[,1] [,2] [,3] [,4] [,5]
[1,] -0.03350376 0.2343721 0.9467223 0.6239159 -0.4009166
[2,] 0.18488311 0.5129832 0.3752907 0.2825048 -0.6657770
[,6] [,7] [,8] [,9] [,10]
[1,] -0.5012463 -0.5600915 0.9140493 -1.970049 -0.1085638
[2,] 0.1647918 -1.1582757 -1.4669017 1.260016 0.6588312
$startweights[[1]][[2]]
[,1]
[1,] 0.6881827
[2,] 0.7268674
[3,] -0.9532505
[4,] -0.3083938
[5,] 0.9051658
[6,] -0.2535406
[7,] 0.7907308
[8,] -0.2203825
[9,] 1.0148172
[10,] 0.2076412
[11,] -0.4062244
$result.matrix
[,1]
error 2.551317e-04
reached.threshold 8.599877e-03
steps 1.757000e+03
Intercept.to.1layhid1 4.421984e-01
input.to.1layhid1 3.712114e-02
Intercept.to.1layhid2 3.240757e+00
input.to.1layhid2 3.187768e+00
Intercept.to.1layhid3 3.202168e+00
input.to.1layhid3 1.834951e+00
Intercept.to.1layhid4 -7.877108e-01
input.to.1layhid4 1.106201e-01
Intercept.to.1layhid5 4.098668e-01
input.to.1layhid5 -2.969351e-02
Intercept.to.1layhid6 -6.377513e-01
input.to.1layhid6 2.159582e-02
Intercept.to.1layhid7 3.209541e-01
input.to.1layhid7 -2.508112e-01
Intercept.to.1layhid8 1.088130e+00
input.to.1layhid8 -1.443933e+00
Intercept.to.1layhid9 -1.793694e+00
input.to.1layhid9 7.005957e-01
Intercept.to.1layhid10 2.897821e+00
input.to.1layhid10 3.665216e+00
Intercept.to.output 9.776280e-01
1layhid1.to.output 1.180628e+00
1layhid2.to.output -6.638053e-01
1layhid3.to.output -1.894856e-02
1layhid4.to.output 1.221714e+00
1layhid5.to.output -9.417451e-01
1layhid6.to.output 1.958362e+00
1layhid7.to.output -1.715401e+00
1layhid8.to.output -1.583539e+00
1layhid9.to.output 4.950345e-01
1layhid10.to.output -1.167791e-01
attr(,"class")
[1] "nn"
ls(net.log) #list the components stored in the fitted nn object; each is accessible directly as net.log$XXX
[1] "act.fct" "call"
[3] "covariate" "data"
[5] "err.fct" "exclude"
[7] "generalized.weights" "linear.output"
[9] "model.list" "net.result"
[11] "response" "result.matrix"
[13] "startweights" "weights"
net.log$act.fct #inspect the act.fct component: the activation function, which defaults to the logistic (sigmoid)
function (x)
{
1/(1 + exp(-x))
}
<bytecode: 0x0000000012dc5ee8>
<environment: 0x00000000210a20e0>
attr(,"type")
[1] "logistic"
plot(net.log) # visualize the trained network topology and weights
# Evaluate the network: can it compute the logarithm of unseen inputs?
testdata <- as.data.frame((1:10)^2)      # test inputs: the squares 1, 4, ..., 100
net.result <- compute(net.log, testdata) # feed the test data through the trained network
ls(net.result)                           # the result object holds "net.result" and "neurons"
[1] "net.result" "neurons"
net.results=compute(net.log,testdata) # NOTE(review): duplicates the compute() call above; only the variable name differs (net.results vs net.result), and the later code reads net.results
ls(net.results) # same components as before: "net.result" and "neurons"
[1] "net.result" "neurons"
# For easy comparison, put the true values and the network's
# predictions side by side in one data frame.
niceoutput <- cbind(testdata, log(testdata), as.data.frame(net.results$net.result))
colnames(niceoutput) <- c("input", "expected output", "neural net output") # label the comparison columns
print(niceoutput) # print the comparison table
input expected output neural net output
1 1 0.000000 0.03171921
2 4 1.386294 1.39359903
3 9 2.197225 2.19541861
4 16 2.772589 2.77326131
5 25 3.218876 3.22133011
6 36 3.583519 3.58289693
7 49 3.891820 3.88928457
8 64 4.158883 4.16121537
9 81 4.394449 4.39953416
10 100 4.605170 4.59701350
# Compute the mean squared error between true and predicted values.
error <- as.data.frame(net.results$net.result) - log(testdata) # per-row prediction errors
mse <- sum(error^2) / nrow(error)                              # mean squared error over the test rows
mse # display it: the MSE is tiny, so the predictions are very accurate
[1] 0.0001173849
R语言神经网络与深度学习(一)相关推荐
- R语言 神经网络与深度学习(二)
分类 class.ind=function(cl) #输入cl是一个表示分类结果的向量{n=length(cl) cl=as.factor(cl) #cl转化为因子型x=matrix(0,n,leng ...
- R语言 关于h2o深度学习的一些心得
前言 笔者是在读研究生,用h2o的时间也有小半年了,心血来潮就想写一些自己的心得,希望对大家有所帮助. 之前学习的有些参考其他人的地方,如果有遗漏或侵权请私信,看到了立马加注明或删除! 怎么安装h2o ...
- 【神经网络与深度学习-TensorFlow实践】-中国大学MOOC课程(四)(Python语言基础(2))
[神经网络与深度学习-TensorFlow实践]-中国大学MOOC课程(四)(Python语言基础(2)) 第4讲 Python语言基础(2) 4.1 内置数据结构 4.1.1 序列数据结构(sequ ...
- AI学习笔记(九)从零开始训练神经网络、深度学习开源框架
AI学习笔记之从零开始训练神经网络.深度学习开源框架 从零开始训练神经网络 构建网络的基本框架 启动训练网络并测试数据 深度学习开源框架 深度学习框架 组件--张量 组件--基于张量的各种操作 组件- ...
- 邱锡鹏《神经网络与深度学习》第一章 绪论
目录 1.1 人工智能 1.1.1 人工智能的发展历史 1.1.2 人工智能的流派 1.2 人工神经网络 1.3 机器学习 1.4 表示学习 1.4.1 局部表示和分布式表示 1.4.2 表示学习 1 ...
- 《神经网络与深度学习》邱希鹏 学习笔记 (1)
<神经网络与深度学习>邱希鹏 学习笔记 (1) 完成进度 第一章 绪论 深度学习与神经网络 人工智能 图灵测试 达特茅斯 *(Dartmouth)* 会议 人工智能的研究领域 人工智能发展 ...
- 《神经网络与深度学习》nndl读书笔记
目录 本书的知识体系 深度学习与神经网络概要 人工智能 机器学习 机器学习模型步骤 表示学习 局部表示与分布式表示 深度学习 常用的深度学习框架 端到端学习 神经网络 人工神经网络 神经网络的发展历史 ...
- 结合语言知识和深度学习的中文文本情感分析方法
结合语言知识和深度学习的中文文本情感分析方法 徐康庭, 宋威 北方工业大学信息学院 摘要:在目前的中文文本情感分析研究中,基于语义规则和情感词典的方法通常需要人工设置情感阈值:而基于深度学习的方法由于 ...
- mxnet:结合R与GPU加速深度学习
转载于统计之都,http://cos.name/tag/dmlc/,作者陈天奇 ------------------------------------------------------------ ...
最新文章
- 最牛逼的性能监控系统!集强大功能于一身
- HDU6346(最小权值完美匹配)
- [你必须知道的.NET]第十一回:参数之惑---传递的艺术(上)
- 大名鼎鼎的红黑树,你get了么?2-3树 绝对平衡 右旋转 左旋转 颜色反转
- 2015年第六届蓝桥杯C/C++ A组国赛 —— 第五题:切开字符串
- __attribute__机制介绍
- python语言对嵌套if语句的规定是_Python语言程序设计(8)——if语句的嵌套
- gulp安装指定版本_对比webpack,你更应该先掌握gulp【10分钟教你彻底掌握gulp】
- 学习项目管理PRINCE2有什么用??
- java类静态初始化_Java静态代码块和类初始化、实例初始化过程
- mac下的mysql报错:ERROR 1045(28000)和ERROR 2002 (HY000)的解决办法
- SQLAlchemy数据库映射和使用
- 光棍节程序员闯关秀-解密
- 小学教师计算机国培培训总结,小学教师国培计划研修总结
- 关于RecyclerView 设置条目间距
- win10 计算机网络密码怎么设置,win10系统提示windows安全 输入网络密码的设置教程...
- gin框架的环境搭建和热加载
- writev遇到非阻塞IO
- 基于MobileNetv3实现人脸面部表情识别
- LIGO引力波探测原理
热门文章
- java实现团购功能_[Java教程]jquery组件团购倒计时功能
- ldo和dcdc功耗_深度解析DCDC和LDO各自的原理和区别
- python r转义_Python快速入门系列之二:还学不会我直播跪搓衣板
- Mybatis在Maven项目中使用
- 从趣味游戏到排序算法(2)
- 解决Redis修改bind域名绑定后出现Could not connect to Redis
- linux系统下获取IP,MAC,子网掩码,网关
- C# 杀掉指定进程
- JQ focus blur focusin focuseout
- CSS 魔法:学海无涯,而吾生有涯