Title: | Sensitivity Analysis of Neural Networks |
---|---|
Description: | Analysis functions to quantify inputs importance in neural network models. Functions are available for calculating and plotting the inputs importance and obtaining the activation function of each neuron layer and its derivatives. The importance of a given input is defined as the distribution of the derivatives of the output with respect to that input in each training data point <doi:10.18637/jss.v102.i07>. |
Authors: | José Portela González [aut], Antonio Muñoz San Roque [aut], Jaime Pizarroso Gonzalo [aut, ctb, cre] |
Maintainer: | Jaime Pizarroso Gonzalo <[email protected]> |
License: | GPL (>= 2) |
Version: | 1.1.3 |
Built: | 2024-10-26 05:58:57 UTC |
Source: | https://github.com/jaipizgon/neuralsens |
Evaluate activation function of a neuron
ActFunc(type = "sigmoid", ...)
ActFunc(type = "sigmoid", ...)
type |
|
... |
extra arguments needed to calculate the functions |
numeric
output of the neuron
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
# Return the sigmoid activation function of a neuron ActivationFunction <- ActFunc("sigmoid") # Return the tanh activation function of a neuron ActivationFunction <- ActFunc("tanh") # Return the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, ActFunc)
# Return the sigmoid activation function of a neuron ActivationFunction <- ActFunc("sigmoid") # Return the tanh activation function of a neuron ActivationFunction <- ActFunc("tanh") # Return the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, ActFunc)
Obtain sensitivity alpha-curves associated with the MLP function obtained from
the sensitivities returned by SensAnalysisMLP
.
AlphaSensAnalysis( sens, tol = NULL, max_alpha = 15, curve_equal_origin = FALSE, inp_var = NULL, line_width = 1, title = "Alpha curve of Lp norm values", alpha_bar = 1, kind = "line" )
AlphaSensAnalysis( sens, tol = NULL, max_alpha = 15, curve_equal_origin = FALSE, inp_var = NULL, line_width = 1, title = "Alpha curve of Lp norm values", alpha_bar = 1, kind = "line" )
sens |
sensitivity object returned by |
tol |
difference between M_alpha and the maximum sensitivity of each input variable |
max_alpha |
maximum alpha value to analyze |
curve_equal_origin |
make all the curves begin at (1,0) |
inp_var |
|
line_width |
|
title |
|
alpha_bar |
|
kind |
|
alpha-curves of the MLP function
mod <- RSNNS::mlp(simdata[, c("X1", "X2", "X3")], simdata[, "Y"], maxit = 1000, size = 15, linOut = TRUE) sens <- SensAnalysisMLP(mod, trData = simdata, output_name = "Y", plot = FALSE) AlphaSensAnalysis(sens)
mod <- RSNNS::mlp(simdata[, c("X1", "X2", "X3")], simdata[, "Y"], maxit = 1000, size = 15, linOut = TRUE) sens <- SensAnalysisMLP(mod, trData = simdata, output_name = "Y", plot = FALSE) AlphaSensAnalysis(sens)
Obtain sensitivity alpha-curve associated with the MLP function obtained from
the sensitivities returned by SensAnalysisMLP
of an input variable.
AlphaSensCurve(sens, tol = NULL, max_alpha = 100)
AlphaSensCurve(sens, tol = NULL, max_alpha = 100)
sens |
raw sensitivities of the MLP output with respect to input variable. |
tol |
difference between M_alpha and the maximum sensitivity of each input variable |
max_alpha |
maximum alpha value to analyze |
alpha-curve of the MLP function
mod <- RSNNS::mlp(simdata[, c("X1", "X2", "X3")], simdata[, "Y"], maxit = 1000, size = 15, linOut = TRUE) sens <- SensAnalysisMLP(mod, trData = simdata, output_name = "Y", plot = FALSE) AlphaSensCurve(sens$raw_sens[[1]][,1])
mod <- RSNNS::mlp(simdata[, c("X1", "X2", "X3")], simdata[, "Y"], maxit = 1000, size = 15, linOut = TRUE) sens <- SensAnalysisMLP(mod, trData = simdata, output_name = "Y", plot = FALSE) AlphaSensCurve(sens$raw_sens[[1]][,1])
For a SensMLP Class object, change the significance level of the statistical tests
ChangeBootAlpha(x, boot.alpha)
ChangeBootAlpha(x, boot.alpha)
x |
|
boot.alpha |
|
SensMLP
object with changed significance level. All boot related
metrics are changed
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 ## TRAIN nnet NNET -------------------------------------------------------- set.seed(150) nnetmod <- caret::train(DEM ~ ., data = fdata.Reg.tr, method = "nnet", tuneGrid = expand.grid(size = c(1), decay = c(0.01)), trControl = caret::trainControl(method="none"), preProcess = c('center', 'scale'), linout = FALSE, trace = FALSE, maxit = 300) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = fdata.Reg.tr, plot = FALSE, boot.R=2, output_name='DEM') NeuralSens::ChangeBootAlpha(sens, boot.alpha=0.1)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 ## TRAIN nnet NNET -------------------------------------------------------- set.seed(150) nnetmod <- caret::train(DEM ~ ., data = fdata.Reg.tr, method = "nnet", tuneGrid = expand.grid(size = c(1), decay = c(0.01)), trControl = caret::trainControl(method="none"), preProcess = c('center', 'scale'), linout = FALSE, trace = FALSE, maxit = 300) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = fdata.Reg.tr, plot = FALSE, boot.R=2, output_name='DEM') NeuralSens::ChangeBootAlpha(sens, boot.alpha=0.1)
Plot of sensitivity of the neural network output with respect to the inputs over the time variable from the data provided
CombineSens(object, comb_type = "mean")
CombineSens(object, comb_type = "mean")
object |
|
comb_type |
Function to combine the matrixes of the |
SensMLP
object with the sensitivities combined
fdata <- iris ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata)[1:ncol(fdata)-1], collapse = " + ") form <- formula(paste(names(fdata)[5], form, sep = " ~ ")) set.seed(150) mod <- nnet::nnet(form, data = fdata, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # mod should be a neural network classification model sens <- SensAnalysisMLP(mod, trData = fdata, output_name = 'Species') combinesens <- CombineSens(sens, "sqmean")
fdata <- iris ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata)[1:ncol(fdata)-1], collapse = " + ") form <- formula(paste(names(fdata)[5], form, sep = " ~ ")) set.seed(150) mod <- nnet::nnet(form, data = fdata, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # mod should be a neural network classification model sens <- SensAnalysisMLP(mod, trData = fdata, output_name = 'Species') combinesens <- CombineSens(sens, "sqmean")
Function to compute the Hessian measures of the sensitivities created by SensAnalysisMLP
.
ComputeHessMeasures(sens)
ComputeHessMeasures(sens)
sens |
|
SensAnalysisMLP
object with the sensitivities calculated
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE)
Function to compute the sensitivity measures of the sensitivities created by SensAnalysisMLP
.
ComputeSensMeasures(sens)
ComputeSensMeasures(sens)
sens |
|
SensAnalysisMLP
object with the sensitivities calculated
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE)
Training dataset with values of temperature and working day to predict electrical demand
A data frame with 1980 rows and 4 variables:
date of the measure
electrical demand
Working Day: index which expresses how much work is done that day
weather temperature
Jose Portela Gonzalez
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
Validation dataset with values of temperature and working day to predict electrical demand
A data frame with 7 rows and 3 variables:
date of the measure
Working Day: index which expresses how much work is done that day
weather temperature
Jose Portela Gonzalez
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
Evaluate second derivative of activation function of a neuron
Der2ActFunc(type = "sigmoid", ...)
Der2ActFunc(type = "sigmoid", ...)
type |
|
... |
extra arguments needed to calculate the functions |
numeric
output of the neuron
# Return derivative of the sigmoid activation function of a neuron ActivationFunction <- Der2ActFunc("sigmoid") # Return derivative of the tanh activation function of a neuron ActivationFunction <- Der2ActFunc("tanh") # Return derivative of the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, Der2ActFunc)
# Return derivative of the sigmoid activation function of a neuron ActivationFunction <- Der2ActFunc("sigmoid") # Return derivative of the tanh activation function of a neuron ActivationFunction <- Der2ActFunc("tanh") # Return derivative of the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, Der2ActFunc)
Evaluate third derivative of activation function of a neuron
Der3ActFunc(type = "sigmoid", ...)
Der3ActFunc(type = "sigmoid", ...)
type |
|
... |
extra arguments needed to calculate the functions |
numeric
output of the neuron
# Return derivative of the sigmoid activation function of a neuron ActivationFunction <- Der3ActFunc("sigmoid") # Return derivative of the tanh activation function of a neuron ActivationFunction <- Der3ActFunc("tanh") # Return derivative of the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, Der3ActFunc)
# Return derivative of the sigmoid activation function of a neuron ActivationFunction <- Der3ActFunc("sigmoid") # Return derivative of the tanh activation function of a neuron ActivationFunction <- Der3ActFunc("tanh") # Return derivative of the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, Der3ActFunc)
Evaluate derivative of activation function of a neuron
DerActFunc(type = "sigmoid", ...)
DerActFunc(type = "sigmoid", ...)
type |
|
... |
extra arguments needed to calculate the functions |
numeric
output of the neuron
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
# Return derivative of the sigmoid activation function of a neuron ActivationFunction <- DerActFunc("sigmoid") # Return derivative of the tanh activation function of a neuron ActivationFunction <- DerActFunc("tanh") # Return derivative of the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, DerActFunc)
# Return derivative of the sigmoid activation function of a neuron ActivationFunction <- DerActFunc("sigmoid") # Return derivative of the tanh activation function of a neuron ActivationFunction <- DerActFunc("tanh") # Return derivative of the activation function of several layers of neurons actfuncs <- c("linear","sigmoid","linear") ActivationFunctions <- sapply(actfuncs, DerActFunc)
Define function to create a 'diagonal' array or get the diagonal of an array
diag3Darray(x = 1, dim = length(x), out = "vector")
diag3Darray(x = 1, dim = length(x), out = "vector")
x |
|
dim |
|
out |
|
The diagonal of a 3D array has been defined as those elements in positions c(int,int,int), i.e., the three digits are the same.
If the diagonal should be returned, out
specifies if it should return a "vector"
with
the elements of position c(int,int,int), or "matrix"
with the elements of position c(int,dim,int),
i.e., dim = 2
-> elements (1,1,1),(2,1,2),(3,1,3),(1,2,1),(2,2,2),(3,2,3),(1,3,1),(2,3,2),(3,3,3).
array
with all elements zero except the diagonal, with dimensions c(dim,dim,dim)
x <- diag3Darray(c(1,4,6), dim = 3) x # , , 1 # # [,1] [,2] [,3] # [1,] 1 0 0 # [2,] 0 0 0 # [3,] 0 0 0 # # , , 2 # # [,1] [,2] [,3] # [1,] 0 0 0 # [2,] 0 4 0 # [3,] 0 0 0 # # , , 3 # # [,1] [,2] [,3] # [1,] 0 0 0 # [2,] 0 0 0 # [3,] 0 0 6 diag3Darray(x) # 1, 4, 6
x <- diag3Darray(c(1,4,6), dim = 3) x # , , 1 # # [,1] [,2] [,3] # [1,] 1 0 0 # [2,] 0 0 0 # [3,] 0 0 0 # # , , 2 # # [,1] [,2] [,3] # [1,] 0 0 0 # [2,] 0 4 0 # [3,] 0 0 0 # # , , 3 # # [,1] [,2] [,3] # [1,] 0 0 0 # [2,] 0 0 0 # [3,] 0 0 6 diag3Darray(x) # 1, 4, 6
Define function to change the diagonal of array
diag3Darray(x) <- value
diag3Darray(x) <- value
x |
|
value |
|
The diagonal of a 3D array has been defined as those elements in positions c(int,int,int), i.e., the three digits are the same.
array
with all elements zero except the diagonal, with dimensions c(dim,dim,dim)
x <- array(1, dim = c(3,3,3)) diag3Darray(x) <- c(2,2,2) x # , , 1 # # [,1] [,2] [,3] # [1,] 2 1 1 # [2,] 1 1 1 # [3,] 1 1 1 # # , , 2 # # [,1] [,2] [,3] # [1,] 1 1 1 # [2,] 1 2 1 # [3,] 1 1 1 # # , , 3 # # [,1] [,2] [,3] # [1,] 1 1 1 # [2,] 1 1 1 # [3,] 1 1 2
x <- array(1, dim = c(3,3,3)) diag3Darray(x) <- c(2,2,2) x # , , 1 # # [,1] [,2] [,3] # [1,] 2 1 1 # [2,] 1 1 1 # [3,] 1 1 1 # # , , 2 # # [,1] [,2] [,3] # [1,] 1 1 1 # [2,] 1 2 1 # [3,] 1 1 1 # # , , 3 # # [,1] [,2] [,3] # [1,] 1 1 1 # [2,] 1 1 1 # [3,] 1 1 2
Define function to create a 'diagonal' array or get the diagonal of an array
diag4Darray(x = 1, dim = length(x))
diag4Darray(x = 1, dim = length(x))
x |
|
dim |
|
The diagonal of a 4D array has been defined as those elements in positions c(int,int,int,int), i.e., the four digits are the same.
array
with all elements zero except the diagonal, with dimensions c(dim,dim,dim,dim)
x <- diag4Darray(c(1,3,6,2), dim = 4) x # , , 1, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 1 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 1 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 4, 1 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 1, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 3 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 4, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 1, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 6 0 # [4,] 0 0 0 0 # # , , 4, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 1, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 4, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 2 diag4Darray(x) # 1, 3, 6, 2
x <- diag4Darray(c(1,3,6,2), dim = 4) x # , , 1, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 1 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 1 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 4, 1 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 1, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 3 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 4, 2 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 1, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 6 0 # [4,] 0 0 0 0 # # , , 4, 3 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 1, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 2, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 3, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 0 # # , , 4, 4 # # [,1] [,2] [,3] [,4] # [1,] 0 0 0 0 # [2,] 0 0 0 0 # [3,] 0 0 0 0 # [4,] 0 0 0 2 diag4Darray(x) # 1, 3, 6, 2
Define function to change the diagonal of array
diag4Darray(x) <- value
diag4Darray(x) <- value
x |
|
value |
|
The diagonal of a 4D array has been defined as those elements in positions c(int,int,int,int), i.e., the four digits are the same.
array
with all elements zero except the diagonal, with dimensions c(dim,dim,dim,dim)
x <- array(1, dim = c(4,4,4,4)) diag4Darray(x) <- c(2,2,2,2) x # , , 1, 1 # # [,1] [,2] [,3] [,4] # [1,] 2 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 4, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 1, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 2 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 4, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 1, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 2 1 # [4,] 1 1 1 1 # # , , 4, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 1, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 4, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 2
x <- array(1, dim = c(4,4,4,4)) diag4Darray(x) <- c(2,2,2,2) x # , , 1, 1 # # [,1] [,2] [,3] [,4] # [1,] 2 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 4, 1 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 1, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 2 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 4, 2 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 1, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 2 1 # [4,] 1 1 1 1 # # , , 4, 3 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 1, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 2, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 3, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 1 # # , , 4, 4 # # [,1] [,2] [,3] [,4] # [1,] 1 1 1 1 # [2,] 1 1 1 1 # [3,] 1 1 1 1 # [4,] 1 1 1 2
This function finds the smallest x such that the probability of a random variable being less than or equal to x is greater than or equal to 1 - alpha. It uses the uniroot function to find where the empirical cumulative distribution function (ECDF) crosses 1 - alpha.
find_critical_value(ecdf_func, alpha)
find_critical_value(ecdf_func, alpha)
ecdf_func |
An ECDF function representing the distribution of a random variable. |
alpha |
A numeric value specifying the significance level. |
The smallest x such that P(X <= x) >= 1 - alpha.
data <- rnorm(100) ecdf_data <- ecdf(data) critical_val <- find_critical_value(ecdf_data, 0.05)
data <- rnorm(100) ecdf_data <- ecdf(data) critical_val <- find_critical_value(ecdf_data, 0.05)
3D Plot of second derivatives of the neural network output with respect
to the inputs. This function uses plotly
instead of ggplot2
to
achieve better visualization
HessDotPlot( object, fdata = NULL, input_vars = "all", input_vars2 = "all", output_vars = "all", surface = FALSE, grid = FALSE, color = NULL, ... )
HessDotPlot( object, fdata = NULL, input_vars = "all", input_vars2 = "all", output_vars = "all", surface = FALSE, grid = FALSE, color = NULL, ... )
object |
fitted neural network model or |
fdata |
|
input_vars |
|
input_vars2 |
|
output_vars |
|
surface |
|
grid |
|
color |
|
... |
further arguments that should be passed to |
list of 3D geom_point
plots for the inputs variables representing the
sensitivity of each output with respect to the inputs
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessDotPlot NeuralSens::HessDotPlot(nnetmod, fdata = nntrData, surface = TRUE, color = "WD")
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessDotPlot NeuralSens::HessDotPlot(nnetmod, fdata = nntrData, surface = TRUE, color = "WD")
Show the distribution of the sensitivities of the output
in geom_sina()
plot whose color depends on the input values
HessFeaturePlot(object, fdata = NULL, ...)
HessFeaturePlot(object, fdata = NULL, ...)
object |
fitted neural network model or |
fdata |
|
... |
further arguments that should be passed to |
list of Feature sensitivity plots as described in https://www.r-bloggers.com/2019/03/a-gentle-introduction-to-shap-values-in-r/
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP hess <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::HessFeaturePlot(hess)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP hess <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::HessFeaturePlot(hess)
Function for evaluating the sensitivities of the input variables in an MLP model
HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## Default S3 method: HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, deractfunc = NULL, der2actfunc = NULL, preProc = NULL, terms = NULL, output_name = NULL, ... ) ## S3 method for class 'train' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'H2OMultinomialModel' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'H2ORegressionModel' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'list' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc, ... ) ## S3 method for class 'mlp' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nn' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, preProc = NULL, terms = NULL, ... 
) ## S3 method for class 'nnet' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nnetar' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'numeric' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, preProc = NULL, terms = NULL, ... )
HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## Default S3 method: HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, deractfunc = NULL, der2actfunc = NULL, preProc = NULL, terms = NULL, output_name = NULL, ... ) ## S3 method for class 'train' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'H2OMultinomialModel' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'H2ORegressionModel' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'list' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc, ... ) ## S3 method for class 'mlp' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nn' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, preProc = NULL, terms = NULL, ... 
) ## S3 method for class 'nnet' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nnetar' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'numeric' HessianMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, preProc = NULL, terms = NULL, ... )
MLP.fit |
fitted neural network model |
.returnSens |
DEPRECATED |
plot |
|
.rawSens |
DEPRECATED |
sens_origin_layer |
|
sens_end_layer |
|
sens_origin_input |
|
sens_end_input |
|
... |
additional arguments passed to or from other methods |
trData |
|
actfunc |
|
deractfunc |
|
der2actfunc |
|
preProc |
preProcess structure applied to the training data. See also
|
terms |
function applied to the training data to create factors. See
also |
output_name |
|
In case of using an input of class factor
with a package which
requires the input data to be entered as a matrix, the dummies must be created before
training the neural network.
After that, the training data must be given to the function using the
trData
argument.
SensMLP
object with the sensitivity metrics and sensitivities of
the MLP model passed to the function.
Plot 1: colorful plot with the classification of the classes in a 2D map
Plot 2: b/w plot with probability of the chosen class in a 2D map
Plot 3: plot with the predictions of the data provided
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) # Try HessianMLP to calculate sensitivities with respect to output of hidden neurones NeuralSens::HessianMLP(nnetmod, trData = nntrData, sens_origin_layer = 2, sens_end_layer = "last", sens_origin_input = FALSE, sens_end_input = FALSE) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.tr, method = "nnet", linout = TRUE, tuneGrid = data.frame(size = 3, decay = decay), maxit = iters, preProcess = 
c("center","scale"), trControl = ctrl_tune, metric = "RMSE") # Try HessianMLP NeuralSens::HessianMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create a cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.tr, destination_frame = "fdata_h2o") set.seed(150) h2omod <-h2o:: h2o.deeplearning(x = names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], y = names(fdata.Reg.tr)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try HessianMLP NeuralSens::HessianMLP(h2omod) # Turn off the cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## Train RSNNS NNET ------------------------------------------------------------ # Normalize data using RSNNS algorithms trData <- as.data.frame(RSNNS::normalizeData(fdata.Reg.tr)) names(trData) <- names(fdata.Reg.tr) set.seed(150) RSNNSmod <-RSNNS::mlp(x = trData[,2:ncol(trData)], y = trData[,1], size = hidden_neurons, linOut = TRUE, learnFuncParams=c(decay), maxit=iters) # Try HessianMLP NeuralSens::HessianMLP(RSNNSmod, trData = trData, output_name = "DEM") ## USE DEFAULT METHOD ---------------------------------------------------------- NeuralSens::HessianMLP(caretmod$finalModel$wts, trData = fdata.Reg.tr, mlpstr = caretmod$finalModel$n, coefnames = caretmod$coefnames, actfun = c("linear","sigmoid","linear"), output_name = "DEM") ################################################################################ ######################### CLASSIFICATION NNET ################################# ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data 
fdata.Reg.cl <- fdata[,2:ncol(fdata)] fdata.Reg.cl[,2:3] <- fdata.Reg.cl[,2:3]/10 fdata.Reg.cl[,1] <- fdata.Reg.cl[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) # Factorize the output fdata.Reg.cl$DEM <- factor(round(fdata.Reg.cl$DEM, digits = 1)) # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.cl, method = "nnet", linout = FALSE, tuneGrid = data.frame(size = hidden_neurons, decay = decay), maxit = iters, preProcess = c("center","scale"), trControl = ctrl_tune, metric = "Accuracy") # Try HessianMLP NeuralSens::HessianMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create local cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.cl, destination_frame = "fdata_h2o") set.seed(150) h2omod <- h2o::h2o.deeplearning(x = names(fdata.Reg.cl)[2:ncol(fdata.Reg.cl)], y = names(fdata.Reg.cl)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try HessianMLP NeuralSens::HessianMLP(h2omod) # Apaga el cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- 
paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP NeuralSens::HessianMLP(nnetmod, trData = nntrData)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) # Try HessianMLP to calculate sensitivities with respect to output of hidden neurones NeuralSens::HessianMLP(nnetmod, trData = nntrData, sens_origin_layer = 2, sens_end_layer = "last", sens_origin_input = FALSE, sens_end_input = FALSE) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.tr, method = "nnet", linout = TRUE, tuneGrid = data.frame(size = 3, decay = decay), maxit = iters, preProcess = 
c("center","scale"), trControl = ctrl_tune, metric = "RMSE") # Try HessianMLP NeuralSens::HessianMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create a cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.tr, destination_frame = "fdata_h2o") set.seed(150) h2omod <-h2o:: h2o.deeplearning(x = names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], y = names(fdata.Reg.tr)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try HessianMLP NeuralSens::HessianMLP(h2omod) # Turn off the cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## Train RSNNS NNET ------------------------------------------------------------ # Normalize data using RSNNS algorithms trData <- as.data.frame(RSNNS::normalizeData(fdata.Reg.tr)) names(trData) <- names(fdata.Reg.tr) set.seed(150) RSNNSmod <-RSNNS::mlp(x = trData[,2:ncol(trData)], y = trData[,1], size = hidden_neurons, linOut = TRUE, learnFuncParams=c(decay), maxit=iters) # Try HessianMLP NeuralSens::HessianMLP(RSNNSmod, trData = trData, output_name = "DEM") ## USE DEFAULT METHOD ---------------------------------------------------------- NeuralSens::HessianMLP(caretmod$finalModel$wts, trData = fdata.Reg.tr, mlpstr = caretmod$finalModel$n, coefnames = caretmod$coefnames, actfun = c("linear","sigmoid","linear"), output_name = "DEM") ################################################################################ ######################### CLASSIFICATION NNET ################################# ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data 
fdata.Reg.cl <- fdata[,2:ncol(fdata)] fdata.Reg.cl[,2:3] <- fdata.Reg.cl[,2:3]/10 fdata.Reg.cl[,1] <- fdata.Reg.cl[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) # Factorize the output fdata.Reg.cl$DEM <- factor(round(fdata.Reg.cl$DEM, digits = 1)) # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.cl, method = "nnet", linout = FALSE, tuneGrid = data.frame(size = hidden_neurons, decay = decay), maxit = iters, preProcess = c("center","scale"), trControl = ctrl_tune, metric = "Accuracy") # Try HessianMLP NeuralSens::HessianMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create local cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.cl, destination_frame = "fdata_h2o") set.seed(150) h2omod <- h2o::h2o.deeplearning(x = names(fdata.Reg.cl)[2:ncol(fdata.Reg.cl)], y = names(fdata.Reg.cl)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try HessianMLP NeuralSens::HessianMLP(h2omod) # Apaga el cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- 
paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP NeuralSens::HessianMLP(nnetmod, trData = nntrData)
Create an object of HessMLP class
HessMLP( sens = list(), raw_sens = list(), mlp_struct = numeric(), trData = data.frame(), coefnames = character(), output_name = character() )
HessMLP( sens = list(), raw_sens = list(), mlp_struct = numeric(), trData = data.frame(), coefnames = character(), output_name = character() )
sens |
|
raw_sens |
|
mlp_struct |
|
trData |
|
coefnames |
|
output_name |
|
HessMLP
object
Auxiliary function to turn a HessMLP object to a SensMLP object in order to use the plot-related functions associated with SensMLP
HessToSensMLP(x)
HessToSensMLP(x)
x |
|
SensMLP
object
HessMLP
Check if object is of class HessMLP
is.HessMLP(object)
is.HessMLP(object)
object |
|
TRUE
if object
is a HessMLP
object
SensMLP
Check if object is of class SensMLP
is.SensMLP(object)
is.SensMLP(object)
object |
|
TRUE
if object
is a SensMLP
object
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
This function implements the k-stepM algorithm for multiple hypothesis testing. It tests each hypothesis using the critical value calculated from the ECDF of the k-max differences, updating the critical value, and iterating until all hypotheses are tested.
kStepMAlgorithm(original_stats, bootstrap_stats, num_hypotheses, alpha, k)
kStepMAlgorithm(original_stats, bootstrap_stats, num_hypotheses, alpha, k)
original_stats |
A numeric vector of original test statistics for each hypothesis. |
bootstrap_stats |
A numeric matrix of bootstrap test statistics, with rows representing bootstrap samples and columns representing hypotheses. |
num_hypotheses |
An integer specifying the total number of hypotheses. |
alpha |
A numeric value specifying the significance level. |
k |
An integer specifying the threshold number for controlling the k-familywise error rate. |
A list containing two elements: 'signif', a logical vector indicating which hypotheses are rejected, and 'cv', a numeric vector of critical values used for each hypothesis.
Romano, Joseph P., Azeem M. Shaikh, and Michael Wolf. "Formalized data snooping based on generalized error rates." Econometric Theory 24.2 (2008): 404-447.
original_stats <- rnorm(10) bootstrap_stats <- matrix(rnorm(1000), ncol = 10) result <- kStepMAlgorithm(original_stats, bootstrap_stats, 10, 0.05, 1)
original_stats <- rnorm(10) bootstrap_stats <- matrix(rnorm(1000), ncol = 10) result <- kStepMAlgorithm(original_stats, bootstrap_stats, 10, 0.05, 1)
Visualization and analysis tools to aid in the interpretation of neural network models.
Maintainer: Jaime Pizarroso Gonzalo [email protected] [contributor]
Authors:
José Portela González [email protected]
Antonio Muñoz San Roque [email protected]
Useful links:
Report bugs at https://github.com/JaiPizGon/NeuralSens/issues
Plot the sensitivities and sensitivity metrics of a HessMLP
object.
## S3 method for class 'HessMLP' plot( x, plotType = c("sensitivities", "time", "features", "matrix", "interactions"), ... )
## S3 method for class 'HessMLP' plot( x, plotType = c("sensitivities", "time", "features", "matrix", "interactions"), ... )
x |
|
plotType |
|
... |
additional parameters passed to plot function of the |
list of graphic objects created by ggplot
#' ## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) plot(sens) plot(sens,"time")
#' ## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) plot(sens) plot(sens,"time")
Plot the sensitivities and sensitivity metrics of a SensMLP
object.
## S3 method for class 'SensMLP' plot(x, plotType = c("sensitivities", "time", "features"), ...)
## S3 method for class 'SensMLP' plot(x, plotType = c("sensitivities", "time", "features"), ...)
x |
|
plotType |
|
... |
additional parameters passed to plot function of the |
list of graphic objects created by ggplot
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
#' ## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) plot(sens) plot(sens,"time") plot(sens,"features")
#' ## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) plot(sens) plot(sens,"time") plot(sens,"features")
Plot a neural interpretation diagram colored by sensitivities of the model
PlotSensMLP( MLP.fit, metric = "mean", sens_neg_col = "red", sens_pos_col = "blue", ... )
PlotSensMLP( MLP.fit, metric = "mean", sens_neg_col = "red", sens_pos_col = "blue", ... )
MLP.fit |
fitted neural network model |
metric |
metric to plot in the NID. It can be "mean" (default), "median" or "sqmean". It can be any metric to combine the raw sensitivities |
sens_neg_col |
|
sens_pos_col |
|
... |
additional arguments passed to plotnet and/or SensAnalysisMLP |
A graphics object
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP NeuralSens::PlotSensMLP(nnetmod, trData = nntrData)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP NeuralSens::PlotSensMLP(nnetmod, trData = nntrData)
Print the sensitivities of a HessMLP
object.
## S3 method for class 'HessMLP' print(x, n = 5, round_digits = NULL, ...)
## S3 method for class 'HessMLP' print(x, n = 5, round_digits = NULL, ...)
x |
|
n |
|
round_digits |
|
... |
additional parameters |
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) sens
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) sens
Print the sensitivities of a SensMLP
object.
## S3 method for class 'SensMLP' print(x, n = 5, round_digits = NULL, ...)
## S3 method for class 'SensMLP' print(x, n = 5, round_digits = NULL, ...)
x |
|
n |
|
round_digits |
|
... |
additional parameters |
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) sens
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) sens
Print the sensitivity metrics of a HessMLP
object.
These metrics are the mean sensitivity, the standard deviation
of the sensitivities and the mean of the squared sensitivities
## S3 method for class 'summary.HessMLP' print(x, round_digits = NULL, ...)
## S3 method for class 'summary.HessMLP' print(x, round_digits = NULL, ...)
x |
|
round_digits |
|
... |
additional parameters |
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) print(summary(sens))
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) print(summary(sens))
Print the sensitivity metrics of a SensMLP
object.
These metrics are the mean sensitivity, the standard deviation
of the sensitivities and the mean of the squared sensitivities
## S3 method for class 'summary.SensMLP' print(x, round_digits = NULL, boot.alpha = NULL, ...)
## S3 method for class 'summary.SensMLP' print(x, round_digits = NULL, boot.alpha = NULL, ...)
x |
|
round_digits |
|
boot.alpha |
|
... |
additional parameters |
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) print(summary(sens))
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) print(summary(sens))
Function for evaluating the sensitivities of the inputs variables in a mlp model
SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## Default S3 method: SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, deractfunc = NULL, preProc = NULL, terms = NULL, output_name = NULL, ... ) ## S3 method for class 'train' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, boot.R = NULL, boot.seed = 1, boot.alpha = 0.05, ... ) ## S3 method for class 'H2OMultinomialModel' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'H2ORegressionModel' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'list' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc, ... ) ## S3 method for class 'mlp' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nn' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, preProc = NULL, terms = NULL, ... 
) ## S3 method for class 'nnet' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nnetar' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'numeric' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, preProc = NULL, terms = NULL, ... )
SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## Default S3 method: SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, deractfunc = NULL, preProc = NULL, terms = NULL, output_name = NULL, ... ) ## S3 method for class 'train' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, boot.R = NULL, boot.seed = 1, boot.alpha = 0.05, ... ) ## S3 method for class 'H2OMultinomialModel' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'H2ORegressionModel' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'list' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc, ... ) ## S3 method for class 'mlp' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nn' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, preProc = NULL, terms = NULL, ... 
) ## S3 method for class 'nnet' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, preProc = NULL, terms = NULL, ... ) ## S3 method for class 'nnetar' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, ... ) ## S3 method for class 'numeric' SensAnalysisMLP( MLP.fit, .returnSens = TRUE, plot = TRUE, .rawSens = FALSE, sens_origin_layer = 1, sens_end_layer = "last", sens_origin_input = TRUE, sens_end_input = FALSE, trData, actfunc = NULL, preProc = NULL, terms = NULL, ... )
MLP.fit |
fitted neural network model |
.returnSens |
DEPRECATED |
plot |
|
.rawSens |
DEPRECATED |
sens_origin_layer |
|
sens_end_layer |
|
sens_origin_input |
|
sens_end_input |
|
... |
additional arguments passed to or from other methods |
trData |
|
actfunc |
|
deractfunc |
|
preProc |
preProcess structure applied to the training data. See also
|
terms |
function applied to the training data to create factors. See
also |
output_name |
|
boot.R |
|
boot.seed |
|
boot.alpha |
|
When using an input of class factor
together with a package that
requires the input data as a matrix, the dummy variables must be created before
training the neural network.
After that, the training data must be given to the function using the
trData
argument.
SensMLP
object with the sensitivity metrics and sensitivities of
the MLP model passed to the function.
Plot 1: colorful plot with the classification of the classes in a 2D map
Plot 2: b/w plot with probability of the chosen class in a 2D map
Plot 3: plot with the predictions of the data provided
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData) # Try SensAnalysisMLP to calculate sensitivities with respect to output of hidden neurones NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, sens_origin_layer = 2, sens_end_layer = "last", sens_origin_input = FALSE, sens_end_input = FALSE) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.tr, method = "nnet", linout = TRUE, tuneGrid = data.frame(size = 3, decay = decay), maxit = iters, preProcess = 
c("center","scale"), trControl = ctrl_tune, metric = "RMSE") # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create a cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.tr, destination_frame = "fdata_h2o") set.seed(150) h2omod <-h2o:: h2o.deeplearning(x = names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], y = names(fdata.Reg.tr)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(h2omod) # Turn off the cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## Train RSNNS NNET ------------------------------------------------------------ # Normalize data using RSNNS algorithms trData <- as.data.frame(RSNNS::normalizeData(fdata.Reg.tr)) names(trData) <- names(fdata.Reg.tr) set.seed(150) RSNNSmod <-RSNNS::mlp(x = trData[,2:ncol(trData)], y = trData[,1], size = hidden_neurons, linOut = TRUE, learnFuncParams=c(decay), maxit=iters) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(RSNNSmod, trData = trData, output_name = "DEM") ## USE DEFAULT METHOD ---------------------------------------------------------- NeuralSens::SensAnalysisMLP(caretmod$finalModel$wts, trData = fdata.Reg.tr, mlpstr = caretmod$finalModel$n, coefnames = caretmod$coefnames, actfun = c("linear","sigmoid","linear"), output_name = "DEM") ################################################################################ ######################### CLASSIFICATION NNET ################################# ################################################################################ ## Regression dataframe 
-------------------------------------------------------- # Scale the data fdata.Reg.cl <- fdata[,2:ncol(fdata)] fdata.Reg.cl[,2:3] <- fdata.Reg.cl[,2:3]/10 fdata.Reg.cl[,1] <- fdata.Reg.cl[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) # Factorize the output fdata.Reg.cl$DEM <- factor(round(fdata.Reg.cl$DEM, digits = 1)) # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.cl, method = "nnet", linout = FALSE, tuneGrid = data.frame(size = hidden_neurons, decay = decay), maxit = iters, preProcess = c("center","scale"), trControl = ctrl_tune, metric = "Accuracy") # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create local cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.cl, destination_frame = "fdata_h2o") set.seed(150) h2omod <- h2o::h2o.deeplearning(x = names(fdata.Reg.cl)[2:ncol(fdata.Reg.cl)], y = names(fdata.Reg.cl)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(h2omod) # Apaga el cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## TRAIN nnet NNET 
-------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData) # Try SensAnalysisMLP to calculate sensitivities with respect to output of hidden neurones NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, sens_origin_layer = 2, sens_end_layer = "last", sens_origin_input = FALSE, sens_end_input = FALSE) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.tr, method = "nnet", linout = TRUE, tuneGrid = data.frame(size = 3, decay = decay), maxit = iters, preProcess = 
c("center","scale"), trControl = ctrl_tune, metric = "RMSE") # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create a cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.tr, destination_frame = "fdata_h2o") set.seed(150) h2omod <-h2o:: h2o.deeplearning(x = names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], y = names(fdata.Reg.tr)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(h2omod) # Turn off the cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## Train RSNNS NNET ------------------------------------------------------------ # Normalize data using RSNNS algorithms trData <- as.data.frame(RSNNS::normalizeData(fdata.Reg.tr)) names(trData) <- names(fdata.Reg.tr) set.seed(150) RSNNSmod <-RSNNS::mlp(x = trData[,2:ncol(trData)], y = trData[,1], size = hidden_neurons, linOut = TRUE, learnFuncParams=c(decay), maxit=iters) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(RSNNSmod, trData = trData, output_name = "DEM") ## USE DEFAULT METHOD ---------------------------------------------------------- NeuralSens::SensAnalysisMLP(caretmod$finalModel$wts, trData = fdata.Reg.tr, mlpstr = caretmod$finalModel$n, coefnames = caretmod$coefnames, actfun = c("linear","sigmoid","linear"), output_name = "DEM") ################################################################################ ######################### CLASSIFICATION NNET ################################# ################################################################################ ## Regression dataframe 
-------------------------------------------------------- # Scale the data fdata.Reg.cl <- fdata[,2:ncol(fdata)] fdata.Reg.cl[,2:3] <- fdata.Reg.cl[,2:3]/10 fdata.Reg.cl[,1] <- fdata.Reg.cl[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) # Factorize the output fdata.Reg.cl$DEM <- factor(round(fdata.Reg.cl$DEM, digits = 1)) # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.cl, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.cl) ## Train caret NNET ------------------------------------------------------------ # Create trainControl ctrl_tune <- caret::trainControl(method = "boot", savePredictions = FALSE, summaryFunction = caret::defaultSummary) set.seed(150) #For replication caretmod <- caret::train(form = DEM~., data = fdata.Reg.cl, method = "nnet", linout = FALSE, tuneGrid = data.frame(size = hidden_neurons, decay = decay), maxit = iters, preProcess = c("center","scale"), trControl = ctrl_tune, metric = "Accuracy") # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(caretmod) ## Train h2o NNET -------------------------------------------------------------- # Create local cluster with 4 available cores h2o::h2o.init(ip = "localhost", nthreads = 4) # Reset the cluster h2o::h2o.removeAll() fdata_h2o <- h2o::as.h2o(x = fdata.Reg.cl, destination_frame = "fdata_h2o") set.seed(150) h2omod <- h2o::h2o.deeplearning(x = names(fdata.Reg.cl)[2:ncol(fdata.Reg.cl)], y = names(fdata.Reg.cl)[1], distribution = "AUTO", training_frame = fdata_h2o, standardize = TRUE, activation = "Tanh", hidden = c(hidden_neurons), stopping_rounds = 0, epochs = iters, seed = 150, model_id = "nnet_h2o", adaptive_rate = FALSE, rate_decay = decay, export_weights_and_biases = TRUE) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(h2omod) # Apaga el cluster h2o::h2o.shutdown(prompt = FALSE) rm(fdata_h2o) ## TRAIN nnet NNET 
-------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData)
Plot of sensitivities of the neural network output with respect to the inputs
SensDotPlot( object, fdata = NULL, input_vars = "all", output_vars = "all", smooth = FALSE, nspline = NULL, color = NULL, grid = FALSE, ... )
SensDotPlot( object, fdata = NULL, input_vars = "all", output_vars = "all", smooth = FALSE, nspline = NULL, color = NULL, grid = FALSE, ... )
object |
fitted neural network model or |
fdata |
|
input_vars |
|
output_vars |
|
smooth |
|
nspline |
|
color |
|
grid |
|
... |
further arguments that should be passed to |
list of geom_point
plots for the input variables representing the
sensitivity of each output with respect to the inputs
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensDotPlot NeuralSens::SensDotPlot(nnetmod, fdata = nntrData)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensDotPlot NeuralSens::SensDotPlot(nnetmod, fdata = nntrData)
Show the distribution of the sensitivities of the output
in a geom_sina()
plot whose color depends on the input values
SensFeaturePlot(object, fdata = NULL, ...)
SensFeaturePlot(object, fdata = NULL, ...)
object |
fitted neural network model or |
fdata |
|
... |
further arguments that should be passed to |
list of Feature sensitivity plot as described in https://www.r-bloggers.com/2019/03/a-gentle-introduction-to-shap-values-in-r/
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensFeaturePlot(sens)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensFeaturePlot(sens)
Function to plot the sensitivities created by SensAnalysisMLP
.
SensitivityPlots( sens = NULL, der = TRUE, zoom = TRUE, quit.legend = FALSE, output = 1, plot_type = NULL, inp_var = NULL, title = "Sensitivity Plots", dodge_var = FALSE )
SensitivityPlots( sens = NULL, der = TRUE, zoom = TRUE, quit.legend = FALSE, output = 1, plot_type = NULL, inp_var = NULL, title = "Sensitivity Plots", dodge_var = FALSE )
sens |
|
der |
|
zoom |
|
quit.legend |
|
output |
|
plot_type |
|
inp_var |
|
title |
|
dodge_var |
|
List with the following plot for each output:
Plot 1: colorful plot with the classification of the classes in a 2D map
Plot 2: b/w plot with probability of the chosen class in a 2D map
Plot 3: plot with the
predictions of the data provided if param der
is FALSE
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensitivityPlots(sens)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensitivityPlots(sens)
Function to plot the sensitivities created by HessianMLP
.
SensMatPlot( hess, sens = NULL, output = 1, metric = c("mean", "std", "meanSensSQ"), senstype = c("matrix", "interactions"), ... )
SensMatPlot( hess, sens = NULL, output = 1, metric = c("mean", "std", "meanSensSQ"), senstype = c("matrix", "interactions"), ... )
hess |
|
sens |
|
output |
|
metric |
|
senstype |
|
... |
further argument passed similar to |
Most of the code of this function is based on the
ggcorrplot()
function from the ggcorrplot
package. However, due to the
inability to change the limits of the color scale, that function keeps giving a warning
if it is used and the color scale is overwritten.
a list of ggplot
objects, one for each output neuron.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP H <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensMatPlot(H) S <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensMatPlot(H, S, senstype = "interactions")
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 100 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP H <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensMatPlot(H) S <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) NeuralSens::SensMatPlot(H, S, senstype = "interactions")
Create an object of SensMLP class
SensMLP( sens = list(), raw_sens = list(), mlp_struct = numeric(), trData = data.frame(), coefnames = character(), output_name = character(), cv = NULL, boot = NULL, boot.alpha = NULL )
SensMLP( sens = list(), raw_sens = list(), mlp_struct = numeric(), trData = data.frame(), coefnames = character(), output_name = character(), cv = NULL, boot = NULL, boot.alpha = NULL )
sens |
|
raw_sens |
|
mlp_struct |
|
trData |
|
coefnames |
|
output_name |
|
cv |
|
boot |
|
boot.alpha |
|
SensMLP
object
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
Plot of sensitivity of the neural network output with respect to the inputs over the time variable from the data provided
SensTimePlot( object, fdata = NULL, date.var = NULL, facet = FALSE, smooth = FALSE, nspline = NULL, ... )
SensTimePlot( object, fdata = NULL, date.var = NULL, facet = FALSE, smooth = FALSE, nspline = NULL, ... )
object |
fitted neural network model or |
fdata |
|
date.var |
|
facet |
|
smooth |
|
nspline |
|
... |
further arguments that should be passed to |
list of geom_line
plots for the input variables representing the
sensitivity of each output with respect to the inputs over time
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR fdata[,3] <- ifelse(as.data.frame(fdata)[,3] %in% c("SUN","SAT"), 0, 1) ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensTimePlot NeuralSens::SensTimePlot(nnetmod, fdata = nntrData, date.var = NULL)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR fdata[,3] <- ifelse(as.data.frame(fdata)[,3] %in% c("SUN","SAT"), 0, 1) ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensTimePlot NeuralSens::SensTimePlot(nnetmod, fdata = nntrData, date.var = NULL)
data.frame
with 2000 rows of 4 columns with 3
input variables X1, X2, X3
and one output variable Y
.
The data is already scaled, and has been generated using the following code:
set.seed(150)
simdata <- data.frame(
"X1" = rnorm(2000, 0, 1),
"X2" = rnorm(2000, 0, 1),
"X3" = rnorm(2000, 0, 1)
)
simdata$Y <- simdata$X1^2 + 0.5*simdata$X2 + 0.1*rnorm(2000, 0, 1)
A data frame with 2000 rows and 4 variables:
Random input 1
Random input 2
Random input 3
Output
Jaime Pizarroso Gonzalo
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
Print the sensitivity metrics of a HessMLP
object.
These metrics are the mean sensitivity, the standard deviation
of the sensitivities and the mean of the squared sensitivities
## S3 method for class 'HessMLP' summary(object, ...)
## S3 method for class 'HessMLP' summary(object, ...)
object |
|
... |
additional parameters |
summary object of the HessMLP
object passed
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) summary(sens)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try HessianMLP sens <- NeuralSens::HessianMLP(nnetmod, trData = nntrData, plot = FALSE) summary(sens)
Print the sensitivity metrics of a SensMLP
object.
These metrics are the mean sensitivity, the standard deviation
of the sensitivities and the mean of the squared sensitivities
## S3 method for class 'SensMLP' summary(object, ...)
## S3 method for class 'SensMLP' summary(object, ...)
object |
|
... |
additional parameters |
summary object of the SensMLP
object passed
Pizarroso J, Portela J, Muñoz A (2022). NeuralSens: Sensitivity Analysis of Neural Networks. Journal of Statistical Software, 102(7), 1-36.
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) summary(sens)
## Load data ------------------------------------------------------------------- data("DAILY_DEMAND_TR") fdata <- DAILY_DEMAND_TR ## Parameters of the NNET ------------------------------------------------------ hidden_neurons <- 5 iters <- 250 decay <- 0.1 ################################################################################ ######################### REGRESSION NNET ##################################### ################################################################################ ## Regression dataframe -------------------------------------------------------- # Scale the data fdata.Reg.tr <- fdata[,2:ncol(fdata)] fdata.Reg.tr[,3] <- fdata.Reg.tr[,3]/10 fdata.Reg.tr[,1] <- fdata.Reg.tr[,1]/1000 # Normalize the data for some models preProc <- caret::preProcess(fdata.Reg.tr, method = c("center","scale")) nntrData <- predict(preProc, fdata.Reg.tr) #' ## TRAIN nnet NNET -------------------------------------------------------- # Create a formula to train NNET form <- paste(names(fdata.Reg.tr)[2:ncol(fdata.Reg.tr)], collapse = " + ") form <- formula(paste(names(fdata.Reg.tr)[1], form, sep = " ~ ")) set.seed(150) nnetmod <- nnet::nnet(form, data = nntrData, linear.output = TRUE, size = hidden_neurons, decay = decay, maxit = iters) # Try SensAnalysisMLP sens <- NeuralSens::SensAnalysisMLP(nnetmod, trData = nntrData, plot = FALSE) summary(sens)