G Solutions ch. 8 - Artificial neural networks

Solutions to exercises of chapter 8.

G.1 Exercise 1

library("neuralnet")
 
#Create a neural network that learns the square-root function

#Generate 50 random numbers uniformly distributed between 0 and 100
#and store them as a data frame
traininginput <-  as.data.frame(runif(50, min=0, max=100))
trainingoutput <- sqrt(traininginput)
 
#Column bind the data into one variable
trainingdata <- cbind(traininginput,trainingoutput)
colnames(trainingdata) <- c("Input","Output")
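 
Because the training inputs come from runif() and the starting weights are drawn at random, the numbers printed below will differ between runs. For reproducible results, a set.seed() call could be placed before the runif() call above; a minimal sketch (the seed value 42 is arbitrary):

#Placed before the data generation and training, this fixes the random
#number generator so that repeated runs give identical results
set.seed(42)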
 
#Train the neural network
#The network will have a single hidden layer with 10 neurons
#Threshold is a numeric value specifying the threshold for the partial
#derivatives of the error function, used as the stopping criterion.
net.sqrt <- neuralnet(Output~Input,trainingdata, hidden=10, threshold=0.01)
print(net.sqrt)
## $call
## neuralnet(formula = Output ~ Input, data = trainingdata, hidden = 10, 
##     threshold = 0.01)
## 
## $response
##          Output
## 1  9.1477911396
## 2  8.6702719111
## 3  8.1989179492
## 4  7.9993923814
## 5  2.0727916597
## 6  7.6535181298
## 7  8.6077014995
## 8  6.1929611268
## 9  2.7394027085
## 10 9.8711584747
## 11 9.5719643063
## 12 7.3952514965
## 13 7.6891083490
## 14 7.0988722331
## 15 7.8050605937
## 16 7.6502501568
## 17 3.3695554823
## 18 6.1874068404
## 19 2.0751308936
## 20 0.2830496734
## 21 6.7499253468
## 22 3.4573739934
## 23 3.1299810238
## 24 9.3759345704
## 25 8.4565548523
## 26 2.7830164872
## 27 7.8906571215
## 28 4.8331967492
## 29 8.4832999953
## 30 4.8792809423
## 31 8.1410483948
## 32 3.9337631560
## 33 8.6604882915
## 34 4.7111391705
## 35 3.9369427221
## 36 3.6881311871
## 37 3.6893313685
## 38 9.7850843423
## 39 6.9199049657
## 40 1.0231754808
## 41 5.9054292953
## 42 2.2795702291
## 43 7.6832718902
## 44 6.0058968648
## 45 7.0797231158
## 46 2.8506310661
## 47 9.2517347986
## 48 8.1194567340
## 49 9.9216591432
## 50 5.8589684763
## 
## $covariate
##                 [,1]
##  [1,] 83.68208273314
##  [2,] 75.17361501232
##  [3,] 67.22225553822
##  [4,] 63.99027847219
##  [5,]  4.29646526463
##  [6,] 58.57633976266
##  [7,] 74.09252510406
##  [8,] 38.35276751779
##  [9,]  7.50432719942
## [10,] 97.43976963218
## [11,] 91.62250068039
## [12,] 54.68974469695
## [13,] 59.12238720339
## [14,] 50.39398698136
## [15,] 60.91897087172
## [16,] 58.52632746100
## [17,] 11.35390414856
## [18,] 38.28400340863
## [19,]  4.30616822559
## [20,]  0.08011711761
## [21,] 45.56149218697
## [22,] 11.95343493018
## [23,]  9.79678120930
## [24,] 87.90814906824
## [25,] 71.51331997011
## [26,]  7.74518076796
## [27,] 62.26246980950
## [28,] 23.35979081690
## [29,] 71.96637880988
## [30,] 23.80738251377
## [31,] 66.27666896675
## [32,] 15.47449256759
## [33,] 75.00405744649
## [34,] 22.19483228400
## [35,] 15.49951799680
## [36,] 13.60231165309
## [37,] 13.61116594635
## [38,] 95.74787558522
## [39,] 47.88508473430
## [40,]  1.04688806459
## [41,] 34.87409516238
## [42,]  5.19644042943
## [43,] 59.03266693931
## [44,] 36.07079715002
## [45,] 50.12247939594
## [46,]  8.12609747518
## [47,] 85.59459678363
## [48,] 65.92557765543
## [49,] 98.43932015356
## [50,] 34.32751160581
## 
## $model.list
## $model.list$response
## [1] "Output"
## 
## $model.list$variables
## [1] "Input"
## 
## 
## $err.fct
## function (x, y) 
## {
##     1/2 * (y - x)^2
## }
## <environment: 0x7f9d7caa6b10>
## attr(,"type")
## [1] "sse"
## 
## $act.fct
## function (x) 
## {
##     1/(1 + exp(-x))
## }
## <environment: 0x7f9d7caa6b10>
## attr(,"type")
## [1] "logistic"
## 
## $linear.output
## [1] TRUE
## 
## $data
##             Input       Output
## 1  83.68208273314 9.1477911396
## 2  75.17361501232 8.6702719111
## 3  67.22225553822 8.1989179492
## 4  63.99027847219 7.9993923814
## 5   4.29646526463 2.0727916597
## 6  58.57633976266 7.6535181298
## 7  74.09252510406 8.6077014995
## 8  38.35276751779 6.1929611268
## 9   7.50432719942 2.7394027085
## 10 97.43976963218 9.8711584747
## 11 91.62250068039 9.5719643063
## 12 54.68974469695 7.3952514965
## 13 59.12238720339 7.6891083490
## 14 50.39398698136 7.0988722331
## 15 60.91897087172 7.8050605937
## 16 58.52632746100 7.6502501568
## 17 11.35390414856 3.3695554823
## 18 38.28400340863 6.1874068404
## 19  4.30616822559 2.0751308936
## 20  0.08011711761 0.2830496734
## 21 45.56149218697 6.7499253468
## 22 11.95343493018 3.4573739934
## 23  9.79678120930 3.1299810238
## 24 87.90814906824 9.3759345704
## 25 71.51331997011 8.4565548523
## 26  7.74518076796 2.7830164872
## 27 62.26246980950 7.8906571215
## 28 23.35979081690 4.8331967492
## 29 71.96637880988 8.4832999953
## 30 23.80738251377 4.8792809423
## 31 66.27666896675 8.1410483948
## 32 15.47449256759 3.9337631560
## 33 75.00405744649 8.6604882915
## 34 22.19483228400 4.7111391705
## 35 15.49951799680 3.9369427221
## 36 13.60231165309 3.6881311871
## 37 13.61116594635 3.6893313685
## 38 95.74787558522 9.7850843423
## 39 47.88508473430 6.9199049657
## 40  1.04688806459 1.0231754808
## 41 34.87409516238 5.9054292953
## 42  5.19644042943 2.2795702291
## 43 59.03266693931 7.6832718902
## 44 36.07079715002 6.0058968648
## 45 50.12247939594 7.0797231158
## 46  8.12609747518 2.8506310661
## 47 85.59459678363 9.2517347986
## 48 65.92557765543 8.1194567340
## 49 98.43932015356 9.9216591432
## 50 34.32751160581 5.8589684763
## 
## $net.result
## $net.result[[1]]
##            [,1]
## 1  9.1561005960
## 2  8.6767485837
## 3  8.2000898603
## 4  7.9983926684
## 5  2.0736076685
## 6  7.6497841098
## 7  8.6135605771
## 8  6.1946833239
## 9  2.7392747021
## 10 9.8614883874
## 11 9.5740023080
## 12 7.3906426586
## 13 7.6855781825
## 14 7.0945639607
## 15 7.8023258389
## 16 7.6464984000
## 17 3.3704632169
## 18 6.1891676079
## 19 2.0758916887
## 20 0.2831259379
## 21 6.7474402624
## 22 3.4578595344
## 23 3.1314664384
## 24 9.3821271941
## 25 8.4607526394
## 26 2.7832058647
## 27 7.8886297360
## 28 4.8339830893
## 29 8.4878044307
## 30 4.8803806596
## 31 8.1415612314
## 32 3.9318678151
## 33 8.6668719383
## 34 4.7111060708
## 35 3.9350375850
## 36 3.6873313679
## 37 3.6885249957
## 38 9.7795326935
## 39 6.9163710559
## 40 1.0231909761
## 41 5.9087992229
## 42 2.2774873685
## 43 7.6797069597
## 44 6.0087829616
## 45 7.0754784875
## 46 2.8512629577
## 47 9.2593970797
## 48 8.1197288044
## 49 9.9092630424
## 50 5.8625223670
## 
## 
## $weights
## $weights[[1]]
## $weights[[1]][[1]]
##               [,1]         [,2]          [,3]          [,4]           [,5]
## [1,] -0.6550845859 -41.40577804 1.02204394976 -0.3402648497 -0.47866668514
## [2,] -1.4577288764  74.64272611 0.07113647863 -0.2739511833 -0.07436233925
##                [,6]           [,7]           [,8]           [,9]
## [1,]  0.03766131376 -1.51071270379 -0.94391660476 -0.98577927036
## [2,] -0.03044954178  0.02201121792  0.01993977166  0.02026751079
##               [,10]
## [1,] -0.24695684533
## [2,]  0.07891045038
## 
## $weights[[1]][[2]]
##                 [,1]
##  [1,] -0.59509000035
##  [2,] -2.04734727281
##  [3,]  0.02039209456
##  [4,]  1.20409571593
##  [5,] -2.29454992230
##  [6,] -1.35630588396
##  [7,] -2.63559433565
##  [8,]  3.78004998048
##  [9,]  3.65005195624
## [10,]  2.70512565292
## [11,]  2.25872733332
## 
## 
## 
## $startweights
## $startweights[[1]]
## $startweights[[1]][[1]]
##               [,1]         [,2]           [,3]         [,4]         [,5]
## [1,]  0.5648093040 1.4305884731  1.93800912780  0.172604920 -1.277411051
## [2,] -0.8939597573 0.6735289213 -0.01906270559 -1.496592309 -0.832724003
##                [,6]          [,7]          [,8]           [,9]
## [1,] -0.62390149385 -0.1506002373 0.02710852258 -0.05985671553
## [2,]  0.07762184557  0.4238784533 0.92150381616  0.27761102271
##              [,10]
## [1,]  1.1257278277
## [2,] -0.2685427164
## 
## $startweights[[1]][[2]]
##                [,1]
##  [1,] -1.1627581077
##  [2,]  0.3780231340
##  [3,] -0.7920250320
##  [4,]  0.1483080501
##  [5,] -1.9950677086
##  [6,] -0.5638924224
##  [7,] -1.9990252838
##  [8,]  0.1304480922
##  [9,]  2.1663205889
## [10,]  1.2213958212
## [11,]  1.2028387629
## 
## 
## 
## $generalized.weights
## $generalized.weights[[1]]
##                 [,1]
## 1   -0.0007290416977
## 2   -0.0008739404061
## 3   -0.0010448153295
## 4   -0.0011278532667
## 5   -0.1057830395664
## 6   -0.0012912605428
## 7   -0.0008948513768
## 8   -0.0024914893477
## 9   -0.0385968579442
## 10  -0.0005496871904
## 11  -0.0006187636773
## 12  -0.0014333503853
## 13  -0.0012731420436
## 14  -0.0016237633457
## 15  -0.0012162974872
## 16  -0.0012929407086
## 17  -0.0184920766301
## 18  -0.0024986942640
## 19  -0.1053488809987
## 20   4.6290490770359
## 21  -0.0018969859396
## 22  -0.0169275092197
## 23  -0.0239312247906
## 24  -0.0006678069206
## 25  -0.0009474734556
## 26  -0.0364544548598
## 27  -0.0011763442452
## 28  -0.0056198686099
## 29  -0.0009379359399
## 30  -0.0054478090294
## 31  -0.0010681573708
## 32  -0.0109918063557
## 33  -0.0008771778011
## 34  -0.0061101131438
## 35  -0.0109627009162
## 36  -0.0136066447346
## 37  -0.0135918587207
## 38  -0.0005688797837
## 39  -0.0017562970528
## 40 -23.4634744115716
## 41  -0.0029066856510
## 42  -0.0749866826347
## 43  -0.0012760909710
## 44  -0.0027512985138
## 45  -0.0016372527020
## 46  -0.0334288869172
## 47  -0.0007005619034
## 48  -0.0010770170684
## 49  -0.0005386721971
## 50  -0.0029826661039
## 
## 
## $result.matrix
##                                         1
## error                     0.0004025972619
## reached.threshold         0.0092013463806
## steps                  6646.0000000000000
## Intercept.to.1layhid1    -0.6550845858578
## Input.to.1layhid1        -1.4577288764482
## Intercept.to.1layhid2   -41.4057780435541
## Input.to.1layhid2        74.6427261072863
## Intercept.to.1layhid3     1.0220439497575
## Input.to.1layhid3         0.0711364786327
## Intercept.to.1layhid4    -0.3402648497203
## Input.to.1layhid4        -0.2739511833356
## Intercept.to.1layhid5    -0.4786666851419
## Input.to.1layhid5        -0.0743623392505
## Intercept.to.1layhid6     0.0376613137568
## Input.to.1layhid6        -0.0304495417765
## Intercept.to.1layhid7    -1.5107127037935
## Input.to.1layhid7         0.0220112179184
## Intercept.to.1layhid8    -0.9439166047620
## Input.to.1layhid8         0.0199397716636
## Intercept.to.1layhid9    -0.9857792703616
## Input.to.1layhid9         0.0202675107928
## Intercept.to.1layhid10   -0.2469568453307
## Input.to.1layhid10        0.0789104503762
## Intercept.to.Output      -0.5950900003472
## 1layhid.1.to.Output      -2.0473472728078
## 1layhid.2.to.Output       0.0203920945643
## 1layhid.3.to.Output       1.2040957159272
## 1layhid.4.to.Output      -2.2945499223029
## 1layhid.5.to.Output      -1.3563058839603
## 1layhid.6.to.Output      -2.6355943356508
## 1layhid.7.to.Output       3.7800499804802
## 1layhid.8.to.Output       3.6500519562359
## 1layhid.9.to.Output       2.7051256529202
## 1layhid.10.to.Output      2.2587273333164
## 
## attr(,"class")
## [1] "nn"
#Plot the neural network
plot(net.sqrt)
 
#Test the neural network on some test data
testdata <- as.data.frame((1:10)^2) #Generate some squared numbers
net.results <- compute(net.sqrt, testdata) #Run them through the neural network
 
#See what properties net.results has
ls(net.results)
## [1] "net.result" "neurons"
#see the results
print(net.results$net.result)
##               [,1]
##  [1,] 0.9967202236
##  [2,] 2.0028112213
##  [3,] 3.0013049659
##  [4,] 3.9979296099
##  [5,] 5.0019004664
##  [6,] 6.0029175452
##  [7,] 6.9960715431
##  [8,] 7.9990063922
##  [9,] 9.0084635012
## [10,] 9.9828971447
#Display the inputs, expected outputs and network outputs side by side
cleanoutput <- cbind(testdata,sqrt(testdata),
                         as.data.frame(net.results$net.result))
colnames(cleanoutput) <- c("Input","Expected Output","Neural Net Output")
print(cleanoutput)
##    Input Expected Output Neural Net Output
## 1      1               1      0.9967202236
## 2      4               2      2.0028112213
## 3      9               3      3.0013049659
## 4     16               4      3.9979296099
## 5     25               5      5.0019004664
## 6     36               6      6.0029175452
## 7     49               7      6.9960715431
## 8     64               8      7.9990063922
## 9     81               9      9.0084635012
## 10   100              10      9.9828971447
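As a quick numerical check of the fit, the root-mean-square error between the expected and predicted outputs can be computed from the cleanoutput table above (columns 2 and 3 hold the expected and network outputs); a minimal sketch:

#Root-mean-square error of the predictions on the test set
rmse <- sqrt(mean((cleanoutput[,2] - cleanoutput[,3])^2))
print(rmse)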

Acknowledgement: this example exercise is from http://gekkoquant.com/2012/05/26/neural-networks-with-r-simple-example/