[R] Help interpreting library(nnet) script output.. URGENT

cobbler_squad la.foma at gmail.com
Fri Jan 29 22:44:50 CET 2010


Hello,
I am pretty new to R. I am working on neural network classifiers, feeding nnet
with input from different regions of interest (fMRI data). The script I am
using is this:

library(MASS)
heap_lda <- data.frame(as.matrix(t(read.table(file = "R_10_5runs_matrix9.txt"))) * 100000,
                       syll = c(rep("heap", 3), rep("hoop", 3), rep("hop", 3)))
library(nnet)
heap_nnet <- nnet(syll ~ ., data = heap_lda, size = 12, iter = 100, MaxNWts = 10000)

predict(heap_nnet, heap_lda, type = "class")
table(predict(heap_nnet, heap_lda, type = "class"), heap_lda$syll)

# do leave-one-out crossvalidation...

heap_nnet.out <- NULL
all <- 1:9

for (n in all) {
  heap_nnet <- nnet(syll ~ ., data = heap_lda[all != n, ], CV = TRUE,
                    size = 12, iter = 100, MaxNWts = 10000)
  heap_nnet.out <- c(heap_nnet.out,
                     predict(heap_nnet, heap_lda[all == n, ], type = "class"))
}

table(heap_nnet.out, heap_lda$syll)
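
For reference, here is the same analysis written against nnet's documented
arguments. As far as I can tell, nnet() has no "iter" or "CV" argument (the
iteration limit is "maxit"), so both are probably being silently ignored in
the calls above; this is only a sketch of what I think the intended script is:

# Fit on all cases, then leave-one-out cross-validation.
library(MASS)
library(nnet)

heap_lda <- data.frame(as.matrix(t(read.table(file = "R_10_5runs_matrix9.txt"))) * 100000,
                       syll = c(rep("heap", 3), rep("hoop", 3), rep("hop", 3)))

# maxit is nnet's iteration-limit argument (default 100); there is no CV argument.
heap_nnet <- nnet(syll ~ ., data = heap_lda, size = 12, maxit = 100, MaxNWts = 10000)

# Leave-one-out: refit without case n, then predict case n with that fit.
heap_nnet.out <- character(0)
for (n in seq_len(nrow(heap_lda))) {
  fit_n <- nnet(syll ~ ., data = heap_lda[-n, ], size = 12, maxit = 100,
                MaxNWts = 10000, trace = FALSE)
  heap_nnet.out[n] <- predict(fit_n, heap_lda[n, , drop = FALSE], type = "class")
}
table(heap_nnet.out, heap_lda$syll)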

The output I am receiving so far fits this structure (this input is from one
region-of-interest file):

> library(MASS)
> heap_lda <- data.frame(as.matrix(t(read.table(file="R_10_5runs_matrix9.txt")))*100000,
+                        syll = c(rep("heap",3),rep("hoop",3),rep("hop",3)))
> library(nnet)
> heap_nnet <- nnet(syll ~ ., data=heap_lda, size=12, iter=100, MaxNWts=10000)

# weights:  1719
initial  value 10.469219 
iter  10 value 0.057269
iter  20 value 0.000276
final  value 0.000069 
converged
> 

> predict(heap_nnet,heap_lda,type = "class")
[1] "heap" "heap" "heap" "hoop" "hoop" "hoop" "hop"  "hop"  "hop" 

> table(predict(heap_nnet,heap_lda,type = "class"),heap_lda$syll)
      
       heap hoop hop
  heap    3    0   0
  hoop    0    3   0
  hop     0    0   3
> heap_nnet.out <- NULL
> all = c(1:9)
> 
> for(n in all){
+   heap_nnet <- nnet(syll ~ ., data=heap_lda[all != n,], CV=TRUE,
+                     size=12, iter=100, MaxNWts=10000)
+   heap_nnet.out <- c(heap_nnet.out,
+                      predict(heap_nnet, heap_lda[all == n,], type = "class"))
+ }
# weights:  1719
initial  value 10.602879 
iter  10 value 1.417881
iter  20 value 1.387453
iter  30 value 1.386296
final  value 1.386294 
converged
# weights:  1719
initial  value 11.055741 
iter  10 value 0.096622
iter  20 value 0.000189
final  value 0.000060 
converged
# weights:  1719
initial  value 10.029384 
iter  10 value 0.046705
final  value 0.000063 
converged
# weights:  1719
initial  value 10.997292 
iter  10 value 0.011758
final  value 0.000086 
converged
# weights:  1719
initial  value 8.527452 
iter  10 value 0.019332
final  value 0.000060 
converged
# weights:  1719
initial  value 7.470868 
iter  10 value 0.016888
final  value 0.000085 
converged
# weights:  1719
initial  value 10.694363 
iter  10 value 0.000740
iter  20 value 0.000310
final  value 0.000057 
converged
# weights:  1719
initial  value 13.334826 
iter  10 value 0.032689
final  value 0.000091 
converged
# weights:  1719
initial  value 6.861594 
iter  10 value 0.008161
final  value 0.000081 
converged
> 
> table(heap_nnet.out,heap_lda$syll)
             
heap_nnet.out heap hoop hop
         heap    2    1   1
         hoop    0    1   0
         hop     1    1   2

I am having trouble understanding how to interpret the output. Is my intuition
correct that we are comparing the fit on the full data set,

heap_nnet <- nnet(syll ~ ., data=heap_lda, size=12, iter=100, MaxNWts=10000)

# weights:  1719
initial  value 10.469219 
iter  10 value 0.057269
iter  20 value 0.000276
final  value 0.000069 
converged

to the output of the leave-one-out cross-validation? Is the better match the
one that goes through the fewest iterations and arrives at the closest
approximation of the neural network classifier? General ideas/notes regarding
this would be greatly appreciated. Also, which number of weights is best, a
larger or a smaller one (given that our MaxNWts limit is set to 10000)?
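
For what it's worth, here is how I have been reading the two confusion
matrices above; this is only a rough sketch, and the numbers simply restate
the tables printed earlier (rows = predicted class, columns = true syllable):

# Accuracy from the two confusion matrices shown above.
insample <- matrix(c(3, 0, 0,
                     0, 3, 0,
                     0, 0, 3), nrow = 3, byrow = TRUE)
loocv    <- matrix(c(2, 1, 1,
                     0, 1, 0,
                     1, 1, 2), nrow = 3, byrow = TRUE)
sum(diag(insample)) / sum(insample)  # 9/9 = 1.00, training cases predicted on themselves
sum(diag(loocv))    / sum(loocv)     # 5/9 ~ 0.56, each case predicted by a net that never saw it

# If I read ?nnet correctly, the "# weights: 1719" line is just the parameter
# count size*(p+1) + k*(size+1) for p inputs, size hidden units and k classes;
# it is the same for every fit here because size = 12 is fixed.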

I apologize for my lack of familiarity with this and the resulting stupid
questions.

Thanks.
-- 
View this message in context: http://n4.nabble.com/Help-interpreting-libarary-nnet-script-output-URGENT-tp1431725p1431725.html
Sent from the R help mailing list archive at Nabble.com.


