0.2 Datasets to Test

0.3 NN Train Function

hyperParams <- function(optim_method) {
    ## Return the fixed hyper-parameter set shared by every keras run,
    ## plus a human-readable "params" string used for timer logging.
    ##
    ## optim_method: one of "adam", "rmsprop", "sgd", "adagrad", "adadelta".
    ## Returns a list(hidden_activation, iter, lr, params).

    valid_methods <- c("adam", "rmsprop", "sgd", "adagrad", "adadelta")
    if (!is.element(optim_method, valid_methods)) {
        stop("Invalid Parameters.")
    }

    hidden_activation <- "tanh"
    iter <- 10000
    lr   <- 0.001

    params <- paste0("method=", optim_method,
                     "_iter=", iter,
                     "_lr=", lr,
                     "_hidden_activation=", hidden_activation)

    list(hidden_activation = hidden_activation,
         iter = iter,
         lr = lr,
         params = params)
}


build_model <- function(optim_method, hidden_neur, n_inputs = ncol(x)) {
    ## Build and compile a single-hidden-layer keras regression model.
    ##
    ## optim_method: optimizer name; validated inside hyperParams().
    ## hidden_neur : number of neurons in the hidden layer.
    ## n_inputs    : number of input features. Defaults to ncol(x), where x
    ##               is expected on the search path (attach(ZZ) in the main
    ##               loop) -- kept as the default for backward compatibility,
    ##               but callers can now pass it explicitly.
    ## Returns the compiled keras model.

    hyper_params <- hyperParams(optim_method)
    hidden_activation <- hyper_params$hidden_activation
    lr <- hyper_params$lr

    ## switch() replaces the repeated if() chain; hyperParams() has already
    ## stopped on anything outside these five values, so no default is needed.
    ## NOTE(review): recent keras versions rename 'lr' to 'learning_rate';
    ## kept as 'lr' to match the API used throughout this file -- confirm
    ## against the installed keras version.
    op <- switch(optim_method,
                 adam     = optimizer_adam(lr = lr),
                 rmsprop  = optimizer_rmsprop(lr = lr),
                 adagrad  = optimizer_adagrad(lr = lr),
                 adadelta = optimizer_adadelta(lr = lr),
                 sgd      = optimizer_sgd(lr = lr))

    ## Fix: use the activation returned by hyperParams() instead of the
    ## previously hard-coded "tanh" (identical value today, but the setting
    ## was silently ignored before).
    model <- keras_model_sequential() %>%
        layer_dense(units = hidden_neur,
                    activation = hidden_activation,
                    input_shape = n_inputs) %>%
        layer_dense(units = 1)

    model %>% compile(
        loss = "mse",
        optimizer = op,
        metrics = list("mean_absolute_error")
    )
    model
}


NNtrain <- function(x, y, model, iter, optim_method, hidden_neur = NULL) {
    ## Fit a pre-built keras model on (x, y) with early stopping.
    ##
    ## x, y        : training data (matrices prepared by prepareZZ).
    ## model       : compiled keras model from build_model().
    ## iter        : ignored -- overridden below by hyperParams(); kept in
    ##               the signature for backward compatibility.
    ## optim_method: optimizer name, used to look up the hyper-parameters.
    ## hidden_neur : accepted and ignored. Fix: the main loop forwards
    ##               hidden_neur = neur; without this formal, every call
    ##               raised "unused argument" and silently fell back to the
    ##               lm() error handler.
    ## Returns the keras training-history object.

    hyper_params <- hyperParams(optim_method)
    iter <- hyper_params$iter

    ## Stop once val_loss has not improved by min_delta for 20 epochs and
    ## restore the best weights seen so far.
    ## NOTE(review): fit() below is called without validation_split, so
    ## "val_loss" may be unavailable and early stopping may never trigger --
    ## confirm, or monitor "loss" / add validation_split.
    early_stop <- callback_early_stopping(monitor = "val_loss",
                                          patience = 20,
                                          restore_best_weights = TRUE,
                                          mode = "auto",
                                          min_delta = 0.00001)

    model %>% fit(x, y, epochs = iter, verbose = 0,
                  callbacks = list(early_stop))
}

0.4 Main Loop

## Benchmark loop: for every dataset, train nrep independent keras models per
## optimizer, record RMSE/MAE + timing, and plot all runs plus the best one.
for (dset in names(NNdatasets)) {

    ## =============================================
    ## EXTRACT INFORMATION FROM THE SELECTED DATASET
    ## =============================================
    ds     <- NNdatasets[[dset]]$ds
    Z      <- NNdatasets[[dset]]$Z
    neur   <- NNdatasets[[dset]]$neur
    nparNN <- NNdatasets[[dset]]$nparNN
    fmlaNN <- NNdatasets[[dset]]$fmlaNN
    donotremove  <- c("dset", "dsets", "ds", "Z", "neur", "TF", "nrep", "timer",
                      "donotremove", "donotremove2")
    donotremove2 <- c("dset", "dsets")

    ## ===================================================
    ## SELECT THE FORMAT REQUIRED BY THE PACKAGE/ALGORITHM
    ## d = data.frame, m = matrix, v = vector/numeric
    ## ATTACH THE OBJECTS CREATED (x, y, Zxy, ... )
    ## ===================================================
    ZZ     <- prepareZZ(Z, xdmv = "m", ydmv = "m", zdm = "d", scale = TRUE)
    ## NOTE(review): attach() is generally discouraged (masking surprises);
    ## kept here because the x/y/Zxy/ym0/ysd0/y0/xory/uni objects below all
    ## come from the attached ZZ and downstream helpers expect them on the
    ## search path. It is matched by detach(ZZ) at the end of the loop.
    attach(ZZ)

    ## =============================================
    ## SELECT THE PACKAGE USED FOR TRAINING
    ## nrep => SELECT THE NUMBER OF INDEPENDENT RUNS
    ## iter => SELECT THE MAX NUMBER OF ITERATIONS
    ## TF   => PLOT THE RESULTS
    ## =============================================
    nrep   <- 10
    TF     <- TRUE

    method <- c("adam", "rmsprop", "sgd", "adadelta", "adagrad")

    for (m in method) {

        descr  <- paste(dset, "keras::model_sequential", m, sep = "_")

        ## AUTO: per-run predictions and scores
        Ypred  <- list()
        Rmse   <- numeric(length = nrep)
        Mae    <- numeric(length = nrep)

        for (i in seq_len(nrep)) {
            event      <- paste0(descr, sprintf("_%.2d", i))
            timer$start(event)
            #### ADJUST THE FOLLOWING LINES TO THE PACKAGE::ALGORITHM

            hyper_params <- hyperParams(optim_method = m)
            model <- build_model(optim_method = m, hidden_neur = neur)

            ## Fix: drop the spurious 'hidden_neur = neur' argument, which
            ## NNtrain() does not declare -- it made every call fail with
            ## "unused argument" and silently fall into the lm() fallback.
            ## The handler argument is renamed 'e' (the condition object);
            ## it previously shadowed the data vector y.
            NNreg      <- tryCatch(
                            NNtrain(x = x, y = y, model = model,
                                    iter = hyper_params$iter, optim_method = m),
                            error = function(e) {lm(y ~ 0, data = Zxy)}
                          )
            ## Fix: a tryCatch handler must be a function; the bare value
            ## 'ym0' raised "attempt to apply non-function" when triggered.
            ## Predictions are de-scaled back to the original y units.
            y_pred     <- tryCatch(
                            ym0 + ysd0 * (model %>% predict(x)),
                            error = function(e) ym0
                          )
            ####
            Ypred[[i]] <- y_pred
            Rmse[i]    <- funRMSE(y_pred, y0)
            Mae[i]     <- funMAE(y_pred, y0)
            timer$stop(event, RMSE = Rmse[i], MAE = Mae[i],
                       params = hyper_params$params, printmsg = FALSE)
        }
        ## Index of the best (lowest-RMSE) run; ties broken by first match.
        best <- which(Rmse == min(Rmse, na.rm = TRUE))[1]
        best ; Rmse[[best]]

        ## ================================================
        ## PLOT ALL MODELS AND THE MODEL WITH THE BEST RMSE
        ## par OPTIONS CAN BE IMPROVED FOR A BETTER DISPLAY
        ## ================================================
        op <- par(mfcol = c(1, 2))
        plotNN(xory, y0, uni, TF, main = descr)
        for (i in seq_len(nrep)) lipoNN(xory, Ypred[[i]], uni, TF, col = i, lwd = 1)

        plotNN(xory, y0, uni, TF, main = descr)
        lipoNN(xory, Ypred[[best]], uni, TF, col = 4, lwd = 4)
        par(op)
    }


## ===========================
## DETACH ZZ - END OF THE LOOP
## ===========================
    detach(ZZ)
}

0.5 Results

0.6 Best Results

dataset method minRMSE meanRMSE meanTime
mDette sequentialadadelta 8.3225 10.38877 0.381
sequentialadagrad 8.0414 10.66325 0.457
sequentialadam 8.5480 9.63737 0.764
sequentialrmsprop 8.7527 10.33107 0.232
sequentialsgd 7.7555 9.67049 0.350
mFriedman sequentialadadelta 0.2212 0.30273 0.797
sequentialadagrad 0.1922 0.27893 0.964
sequentialadam 0.1766 0.27962 0.548
sequentialrmsprop 0.1790 0.28190 0.616
sequentialsgd 0.2268 0.31924 0.711
mIshigami sequentialadadelta 3.5756 4.06556 1.252
sequentialadagrad 3.3737 4.12658 1.331
sequentialadam 4.1078 4.62108 1.026
sequentialrmsprop 3.5039 4.53777 1.051
sequentialsgd 3.4950 4.22239 1.172
mRef153 sequentialadadelta 15.2496 29.85035 1.660
sequentialadagrad 23.0972 34.74027 1.754
sequentialadam 21.2800 30.46301 1.375
sequentialrmsprop 20.1539 29.05061 1.429
sequentialsgd 22.4759 35.26933 1.596
uDmod1 sequentialadadelta 0.5717 0.65487 2.043
sequentialadagrad 0.5722 0.68235 2.111
sequentialadam 0.5850 0.62642 1.781
sequentialrmsprop 0.5586 0.66789 1.939
sequentialsgd 0.5672 0.63771 2.018
uDmod2 sequentialadadelta 0.4634 0.56354 2.486
sequentialadagrad 0.4509 0.53214 2.705
sequentialadam 0.4464 0.57099 2.201
sequentialrmsprop 0.4589 0.55503 2.312
sequentialsgd 0.4640 0.61697 2.400
uDreyfus1 sequentialadadelta 0.6896 1.75268 2.852
sequentialadagrad 0.8440 1.78233 3.306
sequentialadam 0.7853 1.50770 2.673
sequentialrmsprop 1.3065 2.08621 2.724
sequentialsgd 0.7269 1.37723 2.807
uDreyfus2 sequentialadadelta 0.7054 1.83426 3.461
sequentialadagrad 1.0120 1.77125 3.602
sequentialadam 0.7006 1.49801 3.288
sequentialrmsprop 0.8260 1.67217 3.272
sequentialsgd 0.7068 1.60306 3.432
uGauss1 sequentialadadelta 28.3249 41.40596 3.682
sequentialadagrad 28.3465 39.32356 3.753
sequentialadam 29.1418 39.99443 3.639
sequentialrmsprop 30.3215 42.36668 3.512
sequentialsgd 29.8925 43.86402 3.593
uGauss2 sequentialadadelta 29.0018 36.45131 4.209
sequentialadagrad 33.8363 45.28474 4.756
sequentialadam 29.0246 41.15258 4.008
sequentialrmsprop 31.9979 45.51794 4.243
sequentialsgd 28.8564 43.57858 4.086
uGauss3 sequentialadadelta 32.5226 43.35703 4.731
sequentialadagrad 32.7085 42.68767 4.880
sequentialadam 33.1187 54.67369 5.452
sequentialrmsprop 32.6185 43.69536 5.182
sequentialsgd 32.2845 39.32698 4.432
uNeuroOne sequentialadadelta 0.9694 1.33065 5.158
sequentialadagrad 0.9197 1.45676 5.186
sequentialadam 0.9538 1.42392 5.345
sequentialrmsprop 1.0223 1.51539 5.304
sequentialsgd 0.9970 1.48396 5.103