Title: Randomized and Quasi-Randomized networks for Statistical/Machine Learning
Description: Randomized and Quasi-Randomized networks for Statistical/Machine Learning
Authors: T. Moudiki
Maintainer: T. Moudiki <[email protected]>
License: BSD_3_clause Clear + file LICENSE
Version: 0.20.6
Built: 2024-10-26 05:26:55 UTC
Source: https://github.com/Techtonique/nnetsauce_r
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
AdaBoostClassifier(
  obj, n_estimators = 10L, learning_rate = 0.1, n_hidden_features = 1L,
  reg_lambda = 0, reg_alpha = 0.5, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = FALSE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  col_sample = 1, row_sample = 1, seed = 123L, verbose = 1,
  method = "SAMME", backend = c("cpu", "gpu", "tpu")
)
library(datasets)

X <- as.matrix(iris[, 1:4])
y <- as.integer(iris[, 5]) - 1L
n <- dim(X)[1]
p <- dim(X)[2]

set.seed(213)
# sample train indices without replacement so train and test sets do not overlap
train_index <- sample(x = 1:n, size = floor(0.8*n), replace = FALSE)
test_index <- -train_index

X_train <- as.matrix(iris[train_index, 1:4])
y_train <- as.integer(iris[train_index, 5]) - 1L
X_test <- as.matrix(iris[test_index, 1:4])
y_test <- as.integer(iris[test_index, 5]) - 1L

# The example below is kept commented out because it currently raises:
# ValueError: Sample weights must be 1D array or scalar
# obj <- sklearn$tree$DecisionTreeClassifier()
# obj2 <- AdaBoostClassifier(obj)
# obj2$fit(X_train, y_train)
# print(obj2$score(X_test, y_test))
# print(obj2$predict_proba(X_test))
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
BaseRegressor(
  n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  col_sample = 1, row_sample = 1, seed = 123L,
  backend = c("cpu", "gpu", "tpu")
)
set.seed(123)
n <- 50 ; p <- 3
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)
n <- dim(X)[1]
p <- dim(X)[2]

set.seed(213)
# sample train indices without replacement so train and test sets do not overlap
train_index <- sample(x = 1:n, size = floor(0.8*n), replace = FALSE)
test_index <- -train_index

X_train <- as.matrix(X[train_index, ])
y_train <- y[train_index]
X_test <- as.matrix(X[test_index, ])
y_test <- y[test_index]

obj <- BaseRegressor(n_hidden_features=10L, dropout=0.9)
print(obj$fit(X_train, y_train))
print(obj$score(X_test, y_test))
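Point predictions on the held-out set can then be obtained as below (a minimal sketch; the predict method is assumed to follow the same interface as the other regressors documented further down):

# sketch: held-out point predictions
print(obj$predict(X_test))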
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
BayesianRVFL2Regressor(
  n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  s1 = 0.1, s2 = 0.1, sigma = 0.05, seed = 123L,
  backend = c("cpu", "gpu", "tpu")
)
set.seed(123)
n <- 50 ; p <- 3
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- BayesianRVFL2Regressor(n_hidden_features = 5L, s1=0.01)
print(obj$fit(X_train, y_train))
print(obj$score(X_test, y_test))
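Since the model is Bayesian, a sketch of obtaining predictive uncertainty is given below; the return_std argument is an assumption here, mirroring the Bayesian ridge example in the MTS section:

# sketch: point predictions plus standard deviations (return_std is assumed)
print(obj$predict(X_test, return_std = TRUE))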
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
BayesianRVFLRegressor(
  n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  s = 0.1, sigma = 0.05, seed = 123L, backend = c("cpu", "gpu", "tpu")
)
set.seed(123)
n <- 50 ; p <- 3
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- BayesianRVFLRegressor(n_hidden_features = 5L)
print(obj$fit(X_train, y_train))
print(obj$score(X_test, y_test))
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
CustomClassifier(
  obj, n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  col_sample = 1, row_sample = 1, seed = 123L,
  backend = c("cpu", "gpu", "tpu")
)
library(datasets)

set.seed(123)
X <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- sklearn$tree$DecisionTreeClassifier()
obj2 <- CustomClassifier(obj)
obj2$fit(X_train, y_train)
print(obj2$score(X_test, y_test))
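Class membership probabilities can be inspected as below (a minimal sketch, assuming CustomClassifier exposes predict_proba like the other classifiers in this package):

# sketch: predicted class probabilities on the held-out set
print(obj2$predict_proba(X_test))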
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
CustomRegressor(
  obj, n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  col_sample = 1, row_sample = 1, seed = 123L,
  backend = c("cpu", "gpu", "tpu")
)
set.seed(123)
n <- 50 ; p <- 3
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- sklearn$linear_model$ElasticNet()
obj2 <- CustomRegressor(obj)
obj2$fit(X_train, y_train)
print(obj2$score(X_test, y_test))
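The hidden-layer settings listed in the signature above can be tuned when wrapping the base learner; a minimal sketch (the particular values are illustrative):

# sketch: same base learner, more hidden features and some dropout
obj3 <- CustomRegressor(obj, n_hidden_features = 10L, dropout = 0.1)
obj3$fit(X_train, y_train)
print(obj3$score(X_test, y_test))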
See also https://techtonique.github.io/nnetsauce/
DeepClassifier(obj, n_layers = 3L, ...)
obj: a model object
n_layers: number of hidden layers
...: additional parameters to be passed to
library(datasets)

set.seed(123)
X <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj2 <- sklearn$linear_model$ElasticNet()
obj <- DeepClassifier(obj2, n_layers = 3L)
res <- obj$fit(X_train, y_train)
print(obj$predict(X_test))
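Held-out accuracy can be checked as below (a sketch, assuming DeepClassifier exposes the same score method as the other classifiers documented here):

# sketch: held-out accuracy
print(obj$score(X_test, y_test))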
See also https://techtonique.github.io/nnetsauce/
DeepMTS(obj, n_layers = 3L, ...)
obj: a model object
n_layers: number of hidden layers
...: additional parameters to be passed to
set.seed(123)
X <- matrix(rnorm(300), 100, 3)

obj <- sklearn$linear_model$ElasticNet()
obj2 <- DeepMTS(obj)
obj2$fit(X)
obj2$predict()
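A longer forecasting horizon can presumably be requested through predict; the h argument below is an assumption, not taken from the example above:

# sketch: 10-step-ahead forecasts (the h argument is assumed)
obj2$predict(h = 10L)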
See also https://techtonique.github.io/nnetsauce/
DeepRegressor(obj, n_layers = 3L, ...)
obj: a model object
n_layers: number of hidden layers
...: additional parameters to be passed to
X <- MASS::Boston[,-14] # dataset has an ethical problem
y <- MASS::Boston$medv

set.seed(13)
(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj2 <- sklearn$linear_model$ElasticNet()
obj <- DeepRegressor(obj2, n_layers = 3L, n_clusters=2L)
res <- obj$fit(X_train, y_train)
print(obj$predict(X_test))
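As a quick accuracy check, the held-out root mean squared error can be computed from the predictions returned above; a minimal sketch:

# sketch: held-out RMSE computed from the predictions
print(sqrt(mean((y_test - obj$predict(X_test))^2)))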
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
GLMClassifier(
  n_hidden_features = 5L, lambda1 = 0.01, alpha1 = 0.5, lambda2 = 0.01,
  alpha2 = 0.5, family = "expit", activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  type_scaling = c("std", "std", "std"), optimizer = ns$Optimizer(),
  seed = 123L
)
library(datasets)

set.seed(123)
X <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- GLMClassifier()
obj$fit(X_train, y_train)
print(obj$score(X_test, y_test))
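The regularization parameters lambda1 and lambda2 shown in the signature can be set at construction; a minimal sketch (the values are illustrative):

# sketch: stronger regularization on both penalties
obj <- GLMClassifier(lambda1 = 0.05, lambda2 = 0.05)
obj$fit(X_train, y_train)
print(obj$score(X_test, y_test))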
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
GLMRegressor(
  n_hidden_features = 5L, lambda1 = 0.01, alpha1 = 0.5, lambda2 = 0.01,
  alpha2 = 0.5, family = "gaussian", activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  type_scaling = c("std", "std", "std"), optimizer = ns$Optimizer(),
  seed = 123L
)
set.seed(123)
n <- 50 ; p <- 3
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- GLMRegressor()
obj$fit(X_train, y_train)
print(obj$score(X_test, y_test))
See also https://techtonique.github.io/nnetsauce/
LazyClassifier(
  verbose = 0, ignore_warnings = TRUE, custom_metric = NULL,
  predictions = FALSE, random_state = 42L, estimators = "all",
  preprocess = FALSE, ...
)
verbose: monitor progress (default is 0)
ignore_warnings: print trace when model fitting failed
custom_metric: defining a custom metric (default is NULL)
predictions: obtain predictions (default is FALSE)
random_state: reproducibility seed
estimators: specify classifiers to be adjusted (default is 'all')
preprocess: preprocessing input covariates (default is FALSE)
...: additional parameters to be passed to
Returns an object on which $fit can be called (see the examples).
library(datasets)

set.seed(123)
X <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- LazyClassifier()
res <- obj$fit(X_train, X_test, y_train, y_test)
print(res[[1]])
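When predictions = TRUE is passed to the constructor, per-model predictions are assumed to be returned alongside the leaderboard; a sketch (the position of the predictions in the returned list is an assumption):

# sketch: also collect per-model predictions (res[[2]] is assumed to hold them)
obj <- LazyClassifier(predictions = TRUE)
res <- obj$fit(X_train, X_test, y_train, y_test)
print(res[[1]])
print(res[[2]])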
See also https://techtonique.github.io/nnetsauce/
LazyDeepClassifier(
  verbose = 0, ignore_warnings = TRUE, custom_metric = NULL,
  predictions = FALSE, random_state = 42L, estimators = "all",
  preprocess = FALSE, n_layers = 3L, ...
)
verbose: monitor progress (default is 0)
ignore_warnings: print trace when model fitting failed
custom_metric: defining a custom metric (default is NULL)
predictions: obtain predictions (default is FALSE)
random_state: reproducibility seed
estimators: specify classifiers to be adjusted (default is 'all')
preprocess: preprocessing input covariates (default is FALSE)
n_layers: number of layers for the deep model
...: additional parameters to be passed to
Returns an object on which $fit can be called (see the examples).
library(datasets)

set.seed(123)
X <- as.matrix(iris[, 1:4])
y <- as.integer(iris$Species) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- LazyDeepClassifier()
res <- obj$fit(X_train, X_test, y_train, y_test)
print(res[[1]])
See also https://techtonique.github.io/nnetsauce/
LazyDeepMTS(
  verbose = 0, ignore_warnings = TRUE, custom_metric = NULL,
  predictions = FALSE, random_state = 42L, estimators = "all",
  preprocess = FALSE, show_progress = TRUE, n_layers = 3L, ...
)
verbose: monitor progress (default is 0)
ignore_warnings: print trace when model fitting failed
custom_metric: defining a custom metric (default is NULL)
predictions: obtain predictions (default is FALSE)
random_state: reproducibility seed
estimators: specify regressors to be adjusted (default is 'all')
preprocess: preprocessing input covariates (default is FALSE)
n_layers: number of layers for the deep model
...: additional parameters to be passed to
Returns an object on which $fit can be called (see the examples).
set.seed(123)
X <- matrix(rnorm(300), 100, 3)

# note: rows are split at random here; for a genuine forecasting evaluation,
# a chronological split would be preferable
(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- data.frame(X[index_train, ])
X_test <- data.frame(X[-index_train, ])

obj <- LazyDeepMTS()
res <- obj$fit(X_train, X_test)
print(res[[1]])
See also https://techtonique.github.io/nnetsauce/
LazyDeepRegressor(
  verbose = 0, ignore_warnings = TRUE, custom_metric = NULL,
  predictions = FALSE, random_state = 42L, estimators = "all",
  preprocess = FALSE, n_layers = 3L, ...
)
verbose: monitor progress (default is 0)
ignore_warnings: print trace when model fitting failed
custom_metric: defining a custom metric (default is NULL)
predictions: obtain predictions (default is FALSE)
random_state: reproducibility seed
estimators: specify regressors to be adjusted (default is 'all')
preprocess: preprocessing input covariates (default is FALSE)
n_layers: number of layers for the deep model
...: additional parameters to be passed to
Returns an object on which $fit can be called (see the examples).
X <- MASS::Boston[,-14] # dataset has an ethical problem
y <- MASS::Boston$medv

set.seed(13)
(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- LazyDeepRegressor()
res <- obj$fit(X_train, X_test, y_train, y_test)
print(res[[1]])
See also https://techtonique.github.io/nnetsauce/
LazyMTS(
  verbose = 0, ignore_warnings = TRUE, custom_metric = NULL,
  predictions = FALSE, random_state = 42L, estimators = "all",
  preprocess = FALSE, show_progress = TRUE, ...
)
verbose: monitor progress (default is 0)
ignore_warnings: print trace when model fitting failed
custom_metric: defining a custom metric (default is NULL)
predictions: obtain predictions (default is FALSE)
random_state: reproducibility seed
estimators: specify regressors to be adjusted (default is 'all')
preprocess: preprocessing input covariates (default is FALSE)
...: additional parameters to be passed to
Returns an object on which $fit can be called (see the examples).
set.seed(123)
X <- matrix(rnorm(300), 100, 3)

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- data.frame(X[index_train, ])
X_test <- data.frame(X[-index_train, ])

obj <- LazyMTS()
res <- obj$fit(X_train, X_test)
print(res[[1]])
See also https://techtonique.github.io/nnetsauce/
LazyRegressor(
  verbose = 0, ignore_warnings = TRUE, custom_metric = NULL,
  predictions = FALSE, random_state = 42L, estimators = "all",
  preprocess = FALSE, ...
)
verbose: monitor progress (default is 0)
ignore_warnings: print trace when model fitting failed
custom_metric: defining a custom metric (default is NULL)
predictions: obtain predictions (default is FALSE)
random_state: reproducibility seed
estimators: specify regressors to be adjusted (default is 'all')
preprocess: preprocessing input covariates (default is FALSE)
...: additional parameters to be passed to
Returns an object on which $fit can be called (see the examples).
X <- MASS::Boston[,-14] # dataset has an ethical problem
y <- MASS::Boston$medv

set.seed(13)
(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- LazyRegressor()
res <- obj$fit(X_train, X_test, y_train, y_test)
print(res[[1]])
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
MTS(
  obj, start_input = NULL, frequency_input = NULL, n_hidden_features = 5L,
  activation_name = "relu", a = 0.01, nodes_sim = "sobol", bias = TRUE,
  dropout = 0, direct_link = TRUE, n_clusters = 2L, cluster_encode = TRUE,
  type_clust = "kmeans", lags = 1L, replications = NULL, kernel = NULL,
  agg = "mean", seed = 123L, backend = c("cpu", "gpu", "tpu"), verbose = 0
)
# Example 1 -----

set.seed(123)
X <- matrix(rnorm(300), 100, 3)

obj <- sklearn$linear_model$ElasticNet()
obj2 <- MTS(obj)
obj2$fit(X)
obj2$predict()

# Example 2 -----

set.seed(123)
X <- matrix(rnorm(300), 100, 3)

obj <- sklearn$linear_model$BayesianRidge()
obj2 <- MTS(obj)
obj2$fit(X)
obj2$predict(return_std = TRUE)
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
MultitaskClassifier(
  obj, n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  col_sample = 1, row_sample = 1, seed = 123L,
  backend = c("cpu", "gpu", "tpu")
)
library(datasets)

X <- as.matrix(iris[, 1:4])
y <- as.integer(iris[, 5]) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- sklearn$linear_model$LinearRegression()
obj2 <- MultitaskClassifier(obj)
obj2$fit(X_train, y_train)
print(obj2$score(X_test, y_test))
print(obj2$predict_proba(X_test))
Plot multivariate time series forecast or residuals
## S3 method for class 'MTS'
plot(x, selected_series, level = 95, ...)
x: an object of class 'MTS' (result from MTS)
selected_series: name of the time series selected for plotting
level: confidence level for prediction intervals (default is 95)
...: additional parameters to be passed to
type: one of "pi" (basic prediction intervals), "dist" (a distribution of predictions), or "sims" (the simulations)
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
RandomBagClassifier(
  obj, n_estimators = 50L, n_hidden_features = 5L, activation_name = "relu",
  a = 0.01, nodes_sim = "sobol", bias = TRUE, dropout = 0,
  direct_link = FALSE, n_clusters = 2L, cluster_encode = TRUE,
  type_clust = "kmeans", col_sample = 1, row_sample = 1, n_jobs = NULL,
  seed = 123L, verbose = 1L, backend = c("cpu", "gpu", "tpu")
)
library(datasets)

X <- as.matrix(iris[, 1:4])
y <- as.integer(iris[, 5]) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- sklearn$tree$DecisionTreeClassifier()
obj2 <- RandomBagClassifier(obj, n_estimators=50L, n_hidden_features=5L)
obj2$fit(X_train, y_train)
print(obj2$score(X_test, y_test))
print(obj2$predict_proba(X_test))
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
RandomBagRegressor(
  obj, n_estimators = 10L, n_hidden_features = 1L, activation_name = "relu",
  a = 0.01, nodes_sim = "sobol", bias = TRUE, dropout = 0,
  direct_link = FALSE, n_clusters = 2L, cluster_encode = TRUE,
  type_clust = "kmeans", col_sample = 1, row_sample = 1, n_jobs = NULL,
  seed = 123L, verbose = 1L, backend = c("cpu", "gpu", "tpu")
)
library(datasets)

n <- 20 ; p <- 5
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)

obj <- sklearn$tree$DecisionTreeRegressor()
obj2 <- RandomBagRegressor(obj)
obj2$fit(X[1:12,], y[1:12])
print(obj2$score(X[13:20, ], y[13:20]))
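Point predictions on the held-out rows can be obtained as below (a sketch, assuming the usual predict interface of the other regressors in this package):

# sketch: predictions on the held-out rows
print(obj2$predict(X[13:20, ]))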
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
Ridge2Classifier(
  n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, direct_link = TRUE,
  n_clusters = 2L, cluster_encode = TRUE, type_clust = "kmeans",
  lambda1 = 0.1, lambda2 = 0.1, seed = 123L,
  backend = c("cpu", "gpu", "tpu")
)
library(datasets)

X <- as.matrix(iris[, 1:4])
y <- as.integer(iris[, 5]) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- Ridge2Classifier()
obj$fit(X_train, y_train)
print(obj$score(X_test, y_test))
print(obj$predict_proba(X_train))
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
Ridge2MultitaskClassifier(
  n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, n_clusters = 2L,
  cluster_encode = TRUE, type_clust = "kmeans", lambda1 = 0.1,
  lambda2 = 0.1, seed = 123L, backend = c("cpu", "gpu", "tpu")
)
# Example 1 -----

library(datasets)

X <- as.matrix(iris[, 1:4])
y <- as.integer(iris[, 5]) - 1L

(index_train <- base::sample.int(n = nrow(X), size = floor(0.8*nrow(X)), replace = FALSE))
X_train <- X[index_train, ]
y_train <- y[index_train]
X_test <- X[-index_train, ]
y_test <- y[-index_train]

obj <- Ridge2MultitaskClassifier()
obj$fit(X_train, y_train)
print(obj$score(X_test, y_test))
print(obj$predict_proba(X_train))
Parameters' description can be found at https://techtonique.github.io/nnetsauce/
Ridge2Regressor(
  n_hidden_features = 5L, activation_name = "relu", a = 0.01,
  nodes_sim = "sobol", bias = TRUE, dropout = 0, n_clusters = 2L,
  cluster_encode = TRUE, type_clust = "kmeans", lambda1 = 0.1,
  lambda2 = 0.1, seed = 123L, backend = c("cpu", "gpu", "tpu")
)
set.seed(123)
n <- 50 ; p <- 3
X <- matrix(rnorm(n * p), n, p) # no intercept!
y <- rnorm(n)

obj <- nnetsauce::Ridge2Regressor(n_hidden_features = 5L)
print(obj$fit(X, y))
print(obj$score(X, y))