Reuters NN model

##3# Build the model -----------------------------------------------------------
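# trn_Data / trn_Labels and tst_Data / tst_Labels come from the previous post.
# A minimal recap sketch of that preparation (the vectorize_sequences helper and
# variable names are assumed to match the earlier post):
library(keras)

reuters <- dataset_reuters(num_words=10000)
c(c(train_data, train_labels), c(test_data, test_labels)) %<-% reuters

# multi-hot encode each newswire into a 10,000-dimensional 0/1 vector
vectorize_sequences <- function(sequences, dimension=10000) {
  results <- matrix(0, nrow=length(sequences), ncol=dimension)
  for (i in 1:length(sequences))
    results[i, sequences[[i]]] <- 1
  results
}
trn_Data <- vectorize_sequences(train_data)   # 8982 x 10000
tst_Data <- vectorize_sequences(test_data)    # 2246 x 10000

# one-hot encode the 46 topic labels
trn_Labels <- to_categorical(train_labels)
tst_Labels <- to_categorical(test_labels)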
trn_Data %>% dim()      # [1] 8982 10000

## ` ` Setup the layers --------------------------------------------------------
#            hidden layers   output units   output activation
# imdb       16, 16          <1>            sigmoid
# reuters    64, 64          <46>           softmax
model <- keras_model_sequential() %>% 
  layer_dense(units=64, activation="relu", input_shape=c(10000)) %>% 
  layer_dense(units=64, activation="relu") %>% 
  layer_dense(units=46, activation="softmax")

model %>% summary()
# _________________________________________________________________  
# Layer (type)       Output Shape   Param #   
# =================================================================
# dense_1 (Dense)    (None, 64)     640064    (10000*64) + 1*64
# _________________________________________________________________
# dense_2 (Dense)    (None, 64)       4160    (64 * 64) + 64  
# _________________________________________________________________
# dense_3 (Dense)    (None, 46)       2990    (64 * 46) + 46
# =================================================================
# Total params: 647,214
# Trainable params: 647,214
# Non-trainable params: 0
# _________________________________________________________________

## ` ` Compile the model -------------------------------------------------------
model %>% compile(
  optimizer="rmsprop",                # optimizer_rmsprop(lr=.001)
  loss="categorical_crossentropy",    # 46-way single-label classification with one-hot labels
  metrics=c("accuracy")
)
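
# Note: if the labels were kept as plain integer class indices (no to_categorical),
# the sparse variant of the loss would be used instead -- a sketch, not needed with
# the one-hot trn_Labels used here:
# model %>% compile(
#   optimizer="rmsprop",
#   loss="sparse_categorical_crossentropy",
#   metrics=c("accuracy")
# )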

##4# Train the model -----------------------------------------------------------
#  Validating your approach ~~~~~~~~
# validation set 
val_indices <- 1:1000       # 1000 from 8982
trn_Data_validate  <- trn_Data[ val_indices, ]
trn_Data_partial   <- trn_Data[-val_indices, ]
trn_Labels_validate <- trn_Labels[ val_indices, ]
trn_Labels_partial  <- trn_Labels[-val_indices, ]

history <- model %>% fit(  # fit.keras.engine.training.Model
  x=trn_Data_partial,
  y=trn_Labels_partial,
  batch_size=512,
  epochs=30,
  validation_data= list(trn_Data_validate, trn_Labels_validate)
)
history %>% plot()
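
# The plot is used to spot where validation loss bottoms out. The same number can be
# read off the history object directly (a sketch; column names as in recent
# keras-for-R versions and may differ slightly across versions):
metrics  <- as.data.frame(history)
val_loss <- subset(metrics, metric == "loss" & data == "validation")
which.min(val_loss$value)   # epoch with the lowest validation loss (~9 here)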

# train2: retrain from scratch for the chosen number of epochs
# (re-create and re-compile the model first so training restarts from fresh weights)
history <- model %>% fit(  # fit.keras.engine.training.Model
  x=trn_Data,
  y=trn_Labels,
  batch_size=512,
  epochs=9,
  #validation_data= list(trn_Data_validate, trn_Labels_validate)
)

history %>% plot() 

##5# Evaluate accuracy ---------------------------------------------------------
# after retraining with the proper number of epochs
results <- model %>% evaluate(tst_Data, tst_Labels)
results

# random baseline
# tstLabels: the raw integer test labels (before one-hot encoding)
tstLabels_copy <- sample(tstLabels)   # shuffled copy of the labels
length(which(tstLabels == tstLabels_copy)) / length(tstLabels)   # ~0.195
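
# Another reference point (sketch): always predict the single most frequent topic
# (again assuming tstLabels holds the raw integer class labels)
most_common <- as.integer(names(which.max(table(tstLabels))))
mean(tstLabels == most_common)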

##6# Make predictions ----------------------------------------------------------
pred <- model %>% predict(tst_Data)
dim(pred)
sum(pred[1,])       # the 46 class probabilities sum to 1
which.max(pred[1,]) # most likely class: column 5 (1-based; label 4 in 0-based indexing)
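
# Turn the probability matrix into predicted class indices for every sample;
# which.max() gives the 1-based column, so subtract 1 for the 0-based Reuters labels
pred_classes <- apply(pred, 1, which.max) - 1
head(pred_classes)
# hand-computed accuracy should match evaluate() above
# (tstLabels assumed to be the raw integer test labels, as in the baseline check)
mean(pred_classes == tstLabels)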

#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#  Further experiments
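
# e.g. try larger or smaller hidden layers (32 or 128 units), or one vs. three hidden layers.
# One such experiment (sketch): squeeze the representation through a hidden layer
# much smaller than the 46 output classes (an information bottleneck) and compare
# validation accuracy against the 64-unit model above.
model_small <- keras_model_sequential() %>% 
  layer_dense(units=64, activation="relu", input_shape=c(10000)) %>% 
  layer_dense(units=4,  activation="relu") %>%    # bottleneck layer
  layer_dense(units=46, activation="softmax")

model_small %>% compile(
  optimizer="rmsprop",
  loss="categorical_crossentropy",
  metrics=c("accuracy")
)

history_small <- model_small %>% fit(
  x=trn_Data_partial, y=trn_Labels_partial,
  epochs=20, batch_size=128,
  validation_data=list(trn_Data_validate, trn_Labels_validate)
)
history_small %>% plot()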