1 Setup

1.1 Packages and options

library(NNbenchmark)   # NNdatasets, ht(), ht9()
library(kableExtra)    # styling of the final kable() table
library(dplyr)         # group_by(), summarise()
library(stringr)       # string manipulation of the event column
options(scipen = 999)  # no scientific notation in the printed tables
odir <- "D:/GSoC2020/Results/2020run04/"   # directory containing the *-results.csv files
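Before reading anything, it can help to confirm that odir really contains the per-dataset result files. This is a minimal sketch (not part of the original run), assuming the same "-results.csv" naming convention used in the next section:

# Sketch: sanity-check the results directory before reading.
res_files <- list.files(odir, pattern = "-results.csv", full.names = TRUE)
if (length(res_files) == 0) stop("No '-results.csv' files found in: ", odir)
basename(res_files)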

2 Read the csv files and compute summary statistics for the metrics

lf        <- lapply(list.files(odir, pattern = "-results.csv", full.names = TRUE), csv::as.csv)
names(lf) <- names(NNdatasets)   # assumes the files are listed in the same order as NNdatasets
lf <- lf[1:4]                    # keep the first four datasets: mDette, mFriedman, mIshigami, mRef153
ht(lf)
## $mDette
##                               event   RMSE    MSE    MAE     WAE time
## 1    mDette_AMORE::train_ADAPTgd_01 2.2257 4.9536 1.7186 10.7671 0.14
## 2    mDette_AMORE::train_ADAPTgd_02 0.4455 0.1985 0.3156  2.0005 0.13
## 3    mDette_AMORE::train_ADAPTgd_03 0.4391 0.1928 0.3111  1.9947 0.14
## 318 mDette_yager::grnn.fit_none*_03 0.4741 0.2248 0.3193  1.9227 1.05
## 319 mDette_yager::grnn.fit_none*_04 0.4741 0.2248 0.3193  1.9227 1.04
## 320 mDette_yager::grnn.fit_none*_05 0.4741 0.2248 0.3193  1.9227 1.03
## 
## $mFriedman
##                                  event   RMSE    MSE    MAE    WAE time
## 1    mFriedman_AMORE::train_ADAPTgd_01 0.0393 0.0015 0.0272 0.1690 0.14
## 2    mFriedman_AMORE::train_ADAPTgd_02 0.0396 0.0016 0.0274 0.1771 0.13
## 3    mFriedman_AMORE::train_ADAPTgd_03 0.0296 0.0009 0.0235 0.1101 0.10
## 318 mFriedman_yager::grnn.fit_none*_03 0.0001 0.0000 0.0000 0.0010 1.05
## 319 mFriedman_yager::grnn.fit_none*_04 0.0001 0.0000 0.0000 0.0010 1.04
## 320 mFriedman_yager::grnn.fit_none*_05 0.0001 0.0000 0.0000 0.0010 1.05
## 
## $mIshigami
##                                  event   RMSE    MSE    MAE    WAE time
## 1    mIshigami_AMORE::train_ADAPTgd_01 0.7690 0.5914 0.5722 2.7954 0.22
## 2    mIshigami_AMORE::train_ADAPTgd_02 0.8135 0.6618 0.6083 2.9968 0.23
## 3    mIshigami_AMORE::train_ADAPTgd_03 0.8481 0.7192 0.6236 3.1321 0.22
## 318 mIshigami_yager::grnn.fit_none*_03 0.3204 0.1027 0.2356 1.2542 1.05
## 319 mIshigami_yager::grnn.fit_none*_04 0.3204 0.1027 0.2356 1.2542 1.03
## 320 mIshigami_yager::grnn.fit_none*_05 0.3204 0.1027 0.2356 1.2542 1.03
## 
## $mRef153
##                                event   RMSE     MSE    MAE     WAE time
## 1    mRef153_AMORE::train_ADAPTgd_01 3.6054 12.9988 2.5821 13.7798 0.05
## 2    mRef153_AMORE::train_ADAPTgd_02 3.3454 11.1916 2.3809 13.4872 0.01
## 3    mRef153_AMORE::train_ADAPTgd_03 3.3184 11.0118 2.3437 13.7131 0.05
## 318 mRef153_yager::grnn.fit_none*_03 1.3399  1.7954 0.6881  7.4019 0.08
## 319 mRef153_yager::grnn.fit_none*_04 1.3399  1.7954 0.6881  7.4019 0.08
## 320 mRef153_yager::grnn.fit_none*_05 1.3399  1.7954 0.6881  7.4019 0.07
# Split the event string "<dataset>_<package::function_algorithm>_<run>" into its components
gfr <- lapply(lf, function(dfr) cbind(
                      ds   = str_remove(str_extract(dfr$event, "\\w+_"), "_"),                        # dataset name
                      pfa  = str_sub(str_remove(dfr$event, str_extract(dfr$event, "\\w+_")),  1, -4),  # package::function_algorithm
                      run  = str_sub(dfr$event, -2, -1),                                               # run number
                      dfr[,c("RMSE","MAE","WAE","time")]
                      ))
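As a quick check of the string handling above, here is the same extraction applied to a single event string taken from the output shown earlier (a sketch; any event following the <dataset>_<package::function_algorithm>_<run> pattern works):

# Sketch: the parsing above applied to one event string.
ev <- "mDette_AMORE::train_ADAPTgd_01"
str_remove(str_extract(ev, "\\w+_"), "_")                   # "mDette"                (ds)
str_sub(str_remove(ev, str_extract(ev, "\\w+_")), 1, -4)    # "AMORE::train_ADAPTgd"  (pfa)
str_sub(ev, -2, -1)                                         # "01"                    (run)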

yfr <- lapply(gfr, function(dfr) {
            dfr %>%
            group_by(pfa) %>%
            summarise(time.mean = mean(time), 
                      RMSE.min = min(RMSE), 
                      RMSE.med = median(RMSE),
                      RMSE.d51 = median(RMSE) - min(RMSE),
                      MAE.med  = median(MAE),
                      WAE.med  = median(WAE),
                      .groups  = "drop"          # silence the "ungrouping output" message
                      ) %>%
            as.data.frame()
            })
yfr <- lapply(yfr, function(dfr) transform(dfr, npfa = 1:nrow(dfr)))   # npfa = integer id of each pfa, used for ranking below
ht9(yfr)
## $mDette
##                          pfa time.mean RMSE.min RMSE.med RMSE.d51 MAE.med
## 1       AMORE::train_ADAPTgd     0.128   0.4391   0.4564   0.0173  0.3246
## 2     AMORE::train_ADAPTgdwm     0.184   0.3972   0.4012   0.0040  0.3084
## 3       AMORE::train_BATCHgd     1.870   1.8688   1.8999   0.0311  1.5158
## 62 validann::ann_Nelder-Mead     2.126   3.1073   3.5453   0.4380  2.7197
## 63        validann::ann_SANN     0.172   3.3417   4.0522   0.7105  2.9633
## 64     yager::grnn.fit_none*     1.064   0.4741   0.4741   0.0000  0.3193
##    WAE.med npfa
## 1   2.0005    1
## 2   1.7312    2
## 3   8.6487    3
## 62 17.3854   62
## 63 19.6574   63
## 64  1.9227   64
## 
## $mFriedman
##                          pfa time.mean RMSE.min RMSE.med RMSE.d51 MAE.med
## 1       AMORE::train_ADAPTgd     0.128   0.0264   0.0296   0.0032  0.0235
## 2     AMORE::train_ADAPTgdwm     0.178   0.0439   0.0450   0.0011  0.0321
## 3       AMORE::train_BATCHgd     1.876   0.0177   0.0816   0.0639  0.0748
## 62 validann::ann_Nelder-Mead     6.334   0.0991   0.1082   0.0091  0.0820
## 63        validann::ann_SANN     0.204   0.1414   0.1485   0.0071  0.1149
## 64     yager::grnn.fit_none*     1.044   0.0001   0.0001   0.0000  0.0000
##    WAE.med npfa
## 1   0.1101    1
## 2   0.1788    2
## 3   0.1692    3
## 62  0.3701   62
## 63  0.5629   63
## 64  0.0010   64
## 
## $mIshigami
##                          pfa time.mean RMSE.min RMSE.med RMSE.d51 MAE.med
## 1       AMORE::train_ADAPTgd     0.222   0.7690   0.8135   0.0445  0.6083
## 2     AMORE::train_ADAPTgdwm     0.330   0.8636   0.9950   0.1314  0.7280
## 3       AMORE::train_BATCHgd     2.624   2.5215   2.5544   0.0329  2.1768
## 62 validann::ann_Nelder-Mead    16.114   2.6029   2.6812   0.0783  2.2886
## 63        validann::ann_SANN     0.270   2.9199   2.9986   0.0787  2.4922
## 64     yager::grnn.fit_none*     1.038   0.3204   0.3204   0.0000  0.2356
##    WAE.med npfa
## 1   2.9968    1
## 2   3.8394    2
## 3   6.3018    3
## 62  7.2908   62
## 63 10.0706   63
## 64  1.2542   64
## 
## $mRef153
##                          pfa time.mean RMSE.min RMSE.med RMSE.d51 MAE.med
## 1       AMORE::train_ADAPTgd     0.038   3.3184   3.3402   0.0218  2.3679
## 2     AMORE::train_ADAPTgdwm     0.050   3.3209   3.5541   0.2332  2.5492
## 3       AMORE::train_BATCHgd     1.436   3.3610   3.5412   0.1802  2.5557
## 62 validann::ann_Nelder-Mead    22.952   3.9234   4.1325   0.2091  3.1924
## 63        validann::ann_SANN     0.140   5.7592   7.3556   1.5964  5.9016
## 64     yager::grnn.fit_none*     0.078   1.3399   1.3399   0.0000  0.6881
##    WAE.med npfa
## 1  13.7131    1
## 2  14.1047    2
## 3  13.2065    3
## 62 14.4615   62
## 63 19.8265   63
## 64  7.4019   64

3 Calculate ranks per dataset and merge the results

rankMOFtime <- function(dfr) {
    # Sort the summary table by each criterion, then recover the rank of every
    # pfa from its npfa id: order(sorted$npfa) gives, for npfa = 1..n, the
    # position of that pfa in the sorted table, i.e. its rank (this relies on
    # dfr being in npfa order, as built above).
    dfrtime    <- dfr[order(dfr$time.mean),]
    dfrRMSE    <- dfr[order(dfr$RMSE.min, dfr$time.mean, dfr$RMSE.med),]
    dfrRMSEmed <- dfr[order(dfr$RMSE.med, dfr$RMSE.min, dfr$time.mean),]
    dfrRMSEd51 <- dfr[order(dfr$RMSE.d51),]
    dfrMAE     <- dfr[order(dfr$MAE.med),]
    dfrWAE     <- dfr[order(dfr$WAE.med),]
    transform(dfr, 
              time.rank    = order(dfrtime$npfa),
              RMSE.rank    = order(dfrRMSE$npfa),
              RMSEmed.rank = order(dfrRMSEmed$npfa),
              RMSEd51.rank = order(dfrRMSEd51$npfa),
              MAE.rank     = order(dfrMAE$npfa),
              WAE.rank     = order(dfrWAE$npfa)
              )
}
sfr     <- lapply(yfr, rankMOFtime)
sfrwide <- do.call(cbind, sfr)
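The double ordering inside rankMOFtime() is compact but easy to misread. The toy example below (made-up values, not benchmark results) shows that order() applied to the reordered npfa column does return the rank of each pfa when the values are distinct:

# Toy illustration (made-up values) of the order()-based ranking.
x    <- c(0.30, 0.10, 0.20)      # e.g. time.mean for npfa = 1, 2, 3
npfa <- 1:3
sorted_npfa <- npfa[order(x)]    # 2 3 1 : npfa ids from best to worst
order(sorted_npfa)               # 3 1 2 : rank of npfa 1, 2, 3
rank(x)                          # 3 1 2 : same result for distinct values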

4 Global scores on combined datasets (final table)

# For each criterion: select the per-dataset rank columns, sum the ranks across
# datasets and rank the sums (ties share the lowest score).
sfr.time       <- sfrwide[, grep("time.rank",    colnames(sfrwide), fixed = TRUE)]
time.score     <- rank(apply(sfr.time, 1, sum), ties.method = "min")
sfr.RMSE       <- sfrwide[, grep("RMSE.rank",    colnames(sfrwide), fixed = TRUE)]
RMSE.score     <- rank(apply(sfr.RMSE, 1, sum), ties.method = "min")
sfr.RMSEmed    <- sfrwide[, grep("RMSEmed.rank", colnames(sfrwide), fixed = TRUE)]
RMSEmed.score  <- rank(apply(sfr.RMSEmed, 1, sum), ties.method = "min")
sfr.RMSEd51    <- sfrwide[, grep("RMSEd51.rank", colnames(sfrwide), fixed = TRUE)]
RMSEd51.score  <- rank(apply(sfr.RMSEd51, 1, sum), ties.method = "min")
sfr.MAE        <- sfrwide[, grep("MAE.rank",     colnames(sfrwide), fixed = TRUE)]
MAE.score      <- rank(apply(sfr.MAE, 1, sum), ties.method = "min")
sfr.WAE        <- sfrwide[, grep("WAE.rank",     colnames(sfrwide), fixed = TRUE)]
WAE.score      <- rank(apply(sfr.WAE, 1, sum), ties.method = "min")
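The scoring rule is therefore "rank the sums of the per-dataset ranks". A tiny made-up example of the tie handling:

# Toy illustration (made-up rank sums): with ties.method = "min", algorithms
# with equal rank sums share the best (lowest) possible score.
rank_sums <- c(10, 7, 7, 15)
rank(rank_sums, ties.method = "min")   # 3 1 1 4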

scoredfr0 <- data.frame(sfr$mDette[,"pfa",drop=FALSE], 
# scoredfr0 <- data.frame(sfr$uNeuroOne[,c("pfa")], 
                        time.score, 
                        RMSE.score, 
                        RMSEmed.score,
                        RMSEd51.score,
                        MAE.score,
                        WAE.score)

scoredfr <- scoredfr0[order(scoredfr0$RMSE.score),]
rownames(scoredfr) <- NULL

kable(scoredfr) %>%
  kable_styling(bootstrap_options = c("striped", "hover", "condensed"))
pfa time.score RMSE.score RMSEmed.score RMSEd51.score MAE.score WAE.score
nnet::nnet_none 4 1 3 27 4 4
nlsr::nlxb_none 26 2 9 43 11 18
radiant.model::nn_none 9 2 5 36 7 8
yager::grnn.fit_none* 28 4 1 8 1 1
rminer::fit_none 14 5 2 11 2 2
MachineShop::fit_none 5 6 17 51 17 16
validann::ann_BFGS 38 7 8 35 8 11
validann::ann_CG 62 8 4 15 3 3
traineR::train.nnet_none 6 9 10 32 11 6
qrnn::qrnn.fit_none 22 10 15 31 5 29
monmlp::monmlp.fit_BFGS 19 11 6 13 6 7
brnn::brnn_Gauss-Newton 11 12 11 14 14 9
neuralnet::neuralnet_rprop+ 34 13 14 25 10 13
EnsembleBase::Regression.Batch.Fit_none 3 14 27 51 25 30
RSNNS::mlp_SCG 32 15 13 21 15 19
CaDENCE::cadence.fit_optim 53 16 37 64 29 48
h2o::h2o.deeplearning_first-order 47 17 11 15 13 15
neuralnet::neuralnet_rprop- 31 18 21 37 17 25
validann::ann_L-BFGS-B 41 18 23 48 24 26
AMORE::train_ADAPTgd 7 20 7 10 8 5
automl::automl_train_manual_trainwgrad_adam 55 21 18 30 19 23
AMORE::train_ADAPTgdwm 12 22 16 18 16 13
ANN2::neuralnetwork_rmsprop 16 23 20 24 22 10
ANN2::neuralnetwork_adam 17 24 19 17 20 17
RSNNS::mlp_Rprop 24 25 28 60 28 33
ANN2::neuralnetwork_sgd 15 26 25 29 23 12
deepnet::nn.train_BP 20 27 31 54 36 42
caret::avNNet_none 9 28 22 38 21 19
AMORE::train_BATCHgdwm 38 29 24 12 26 21
AMORE::train_BATCHgd 37 30 33 28 37 22
keras::fit_adamax 44 30 26 20 27 27
RSNNS::mlp_BackpropWeightDecay 26 32 28 47 32 28
neuralnet::neuralnet_slr 43 33 46 46 49 49
automl::automl_train_manual_trainwgrad_RMSprop 50 34 31 45 30 31
RSNNS::mlp_BackpropChunk 25 35 33 40 33 39
keras::fit_adagrad 58 36 42 54 43 35
RSNNS::mlp_Std_Backpropagation 21 37 36 23 34 43
minpack.lm::nlsLM_none 18 38 30 6 31 24
keras::fit_sgd 45 39 45 48 46 44
RSNNS::mlp_BackpropMomentum 23 39 38 41 38 41
BNN::BNNsel_none* 60 41 44 53 44 39
keras::fit_adam 40 42 35 39 35 31
snnR::snnR_none 7 43 38 18 39 35
RSNNS::mlp_BackpropBatch 46 44 40 22 41 38
keras::fit_adadelta 63 45 43 25 42 33
keras::fit_nadam 42 46 48 62 51 47
neuralnet::neuralnet_sag 56 47 59 54 57 64
automl::automl_train_manual_trainwpso 58 48 47 58 49 45
CaDENCE::cadence.fit_Rprop 61 48 54 63 47 59
deepdive::deepnet_adam 29 50 41 1 40 52
TrafficBDE::TrainCR_none* 64 51 52 7 52 55
CaDENCE::cadence.fit_psoptim 57 52 50 34 44 57
validann::ann_Nelder-Mead 50 53 53 33 55 45
monmlp::monmlp.fit_Nelder-Mead 33 54 55 42 54 50
deepdive::deepnet_momentum 53 55 49 3 47 52
deepdive::deepnet_rmsProp 30 56 51 4 53 51
GMDHreg::gmdh.combi_none* 35 57 56 5 56 37
keras::fit_rmsprop 36 57 57 58 58 56
deepdive::deepnet_gradientDescent 50 59 58 2 59 54
validann::ann_SANN 13 60 60 50 60 60
neuralnet::neuralnet_backprop 48 61 61 9 61 58
elmNNRcpp::elm_train_extremeML 1 62 64 61 62 61
ELMR::OSelm_train.formula_extremeML 2 62 61 57 63 63
RSNNS::mlp_Quickprop 49 64 61 43 64 62
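For reuse outside this report, the final score table can also be written back to the results directory (optional sketch; the file name is hypothetical):

# Optional sketch: export the final score table (hypothetical file name).
write.csv(scoredfr, file.path(odir, "NNbenchmark-global-scores.csv"), row.names = FALSE)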

5 Figures

5.1 Pairwise plots of the global scores

plot(scoredfr[,c("time.score", "RMSE.score", "RMSEmed.score", "RMSEd51.score")], las = 1)

op <- par(mfrow = c(1,3), las = 1, mar = c(0,0.5,0,0.5), oma = c(2,2,3.5,2), cex = 1.1)
plot(scoredfr[,c("RMSE.score", "RMSEmed.score")]); abline(v=10.5, lty = 2)
mtext("x=RMSE.score,  y=RMSEmed.score", line = 1.5, font = 2)
plot(scoredfr[,c("RMSE.score", "time.score")], yaxt = "n"); abline(v=10.5, lty = 2)
mtext("x=RMSE.score,  y=time.score", line = 1.5, font = 2)
plot(scoredfr[,c("RMSE.score", "RMSEd51.score")], yaxt = "n"); Axis(scoredfr[,5], side = 4)
mtext("x=RMSE.score,  y=RMSEd51.score", line = 1.5, font = 2)

# mtext("(x=RMSE.score, y=RMSEmed.score)    (x=RMSE.score, y=time.score)    (x=RMSE.score, y=RMSEd51.score)", 
      # outer = TRUE, line = 2, font = 2)
par(op)
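To keep a copy of this figure next to the csv files, the plot can be redirected to a PNG device. An optional sketch (the file name is hypothetical, not part of the original run):

# Optional sketch: save the pairs plot to a PNG file (hypothetical file name).
png(file.path(odir, "scores_pairs.png"), width = 900, height = 900)
plot(scoredfr[,c("time.score", "RMSE.score", "RMSEmed.score", "RMSEd51.score")], las = 1)
dev.off()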

5.2 Comparison of global scores and scores per dataset

## =====================================
## GLOBAL SCORE APPLIED TO EVERY DATASET
## =====================================
# Combine the per-dataset summary (x = one element of sfr) with the global
# scores (y = scoredfr0) and sort by the global RMSE.score.
merge_sfr_dfr <- function(x, y) {
    z <- cbind(
            x[,c("npfa","pfa","time.mean","RMSE.min","time.rank","RMSE.rank")], 
            y[,c("time.score","RMSE.score")]
        )
    z[order(z$RMSE.score),]
}
zfr <- lapply(sfr, merge_sfr_dfr, y = scoredfr0)
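A quick numerical complement to the figures below (a sketch, not part of the original analysis): the Spearman correlation between the global RMSE.score and the per-dataset RMSE.rank summarises how well the combined score agrees with each dataset taken alone.

# Sketch: agreement between the global RMSE.score and the per-dataset RMSE.rank.
sapply(zfr, function(z) cor(z$RMSE.score, z$RMSE.rank, method = "spearman"))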


## =========================
## GRAPHIC RMSEscore_RMSEmin
## =========================
op <- par(mfrow = c(4,2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
    plot(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "RMSE.min"]),
         xlab = "RMSE.score", ylab = "RMSE.min", # main = names(zfr)[j], 
         las = 1, col = 0, xaxt = "n", yaxt = "n")
    mtext(names(zfr)[j], line = -1.2, cex = 0.8)
    text(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "RMSE.min"]),
         labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=RMSE.score (global)   y=RMSE.min (per dataset)", outer = TRUE, line = 1)


## ==============================
## GRAPHIC RMSEscore_timemean
## ==============================
op <- par(mfrow = c(4,2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
    plot(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "time.mean"]),
         xlab = "RMSE.score", ylab = "time.mean", # main = names(zfr)[j], 
         las = 1, col = 0, xaxt = "n", yaxt = "n")
    mtext(names(zfr)[j], line = -1.2, cex = 0.8)
    text(log1p(zfr[[j]][, "RMSE.score"]), log1p(zfr[[j]][, "time.mean"]),
         labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=RMSE.score (global)   y=time.mean (per dataset)", outer = TRUE, line = 1)

5.3 RMSE versus training time for different numbers of algorithms

## =======================================
## GRAPHIC RMSEmin_timemean - all 64 algos
## =======================================
op <- par(mfrow = c(4,2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
    plot(log1p(zfr[[j]][, "RMSE.min"]), log1p(zfr[[j]][, "time.mean"]),
         xlab = "RMSE.min", ylab = "time.mean", # main = names(zfr)[j], 
         las = 1, col = 0, xaxt = "n", yaxt = "n")
    mtext(names(zfr)[j], line = -1.2, cex = 0.8)
    text(log1p(zfr[[j]][, "RMSE.min"]), log1p(zfr[[j]][, "time.mean"]),
         labels = zfr[[j]][, "RMSE.score"])
}
mtext("x=RMSE.min (per dataset)   y=time.mean (per dataset)    all 64 algos", outer = TRUE, line = 1)


## =======================================
## GRAPHIC RMSEmin_timemean - 12 algos
## =======================================
op <- par(mfrow = c(4,2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
    plot(log1p(zfr[[j]][1:12, "RMSE.min"]), log1p(zfr[[j]][1:12, "time.mean"]),
         xlab = "RMSE.min", ylab = "time.mean", # main = names(zfr)[j], 
         las = 1, col = 0, xaxt = "n", yaxt = "n")
    mtext(names(zfr)[j], line = -1.2, cex = 0.8)
    text(log1p(zfr[[j]][1:12, "RMSE.min"]), log1p(zfr[[j]][1:12, "time.mean"]),
         labels = zfr[[j]][1:12, "RMSE.score"])
}
mtext("x=RMSE.min (per dataset)   y=time.mean (per dataset)    12 algos", outer = TRUE, line = 1)


## =======================================
## GRAPHIC RMSEmin_timemean - 9 algos
## =======================================
op <- par(mfrow = c(4,2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
for (j in seq_along(zfr)) {
    plot(log1p(zfr[[j]][1:9, "RMSE.min"]), log1p(zfr[[j]][1:9, "time.mean"]),
         xlab = "RMSE.min", ylab = "time.mean", # main = names(zfr)[j], 
         las = 1, col = 0, xaxt = "n", yaxt = "n")
    mtext(names(zfr)[j], line = -1.2, cex = 0.8)
    text(log1p(zfr[[j]][1:9, "RMSE.min"]), log1p(zfr[[j]][1:9, "time.mean"]),
         labels = zfr[[j]][1:9, "RMSE.score"])
}
mtext("x=RMSE.min (per dataset)   y=time.mean (per dataset)    9 algos", outer = TRUE, line = 1)
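The three blocks above differ only in how many of the top-scoring algorithms are kept. A compact alternative is a small helper; this is a sketch with a hypothetical name, not code from the original run:

# Sketch: the repeated loops wrapped in a helper; n = NULL plots all algorithms,
# otherwise the top n by global RMSE.score (zfr is already sorted by it).
plot_rmse_time <- function(zfr, n = NULL) {
    op <- par(mfrow = c(4,2), las = 1, mar = c(0,0,0,0), oma = c(1,1,3,1))
    on.exit(par(op))
    for (j in seq_along(zfr)) {
        z <- if (is.null(n)) zfr[[j]] else zfr[[j]][seq_len(n), ]
        plot(log1p(z$RMSE.min), log1p(z$time.mean),
             las = 1, col = 0, xaxt = "n", yaxt = "n")
        mtext(names(zfr)[j], line = -1.2, cex = 0.8)
        text(log1p(z$RMSE.min), log1p(z$time.mean), labels = z$RMSE.score)
    }
    mtext(paste("x=RMSE.min (per dataset)   y=time.mean (per dataset)   ",
                if (is.null(n)) "all algos" else paste(n, "algos")),
          outer = TRUE, line = 1)
}
# plot_rmse_time(zfr, 12)   # reproduces the "12 algos" figure above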

## THE END