diff --git a/R/SklearnClassifierSettings.R b/R/SklearnClassifierSettings.R index 61193ab1..8f29ba3c 100644 --- a/R/SklearnClassifierSettings.R +++ b/R/SklearnClassifierSettings.R @@ -98,7 +98,7 @@ setAdaBoost <- function(nEstimators = list(10, 50, 200), } -adaBoostClassifierInputs <- function(classifier, param) { +AdaBoostClassifierInputs <- function(classifier, param) { model <- classifier( n_estimators = param[[which.max(names(param) == "nEstimators")]], learning_rate = param[[which.max(names(param) == "learningRate")]], @@ -330,7 +330,7 @@ setDecisionTree <- function(criterion = list("gini"), } -decisionTreeClassifierInputs <- function(classifier, param) { +DecisionTreeClassifierInputs <- function(classifier, param) { model <- classifier( criterion = param[[which.max(names(param) == "criterion")]], splitter = param[[which.max(names(param) == "splitter")]], @@ -568,7 +568,7 @@ setNaiveBayes <- function() { return(result) } -gaussianNBInputs <- function(classifier, param) { +GaussianNBInputs <- function(classifier, param) { model <- classifier() return(model) @@ -727,7 +727,7 @@ setRandomForest <- function(ntrees = list(100, 500), } -randomForestClassifierInputs <- function(classifier, param) { +RandomForestClassifierInputs <- function(classifier, param) { model <- classifier( n_estimators = param[[which.max(names(param) == "ntrees")]], criterion = param[[which.max(names(param) == "criterion")]], diff --git a/man/createFeatureEngineeringSettings.Rd b/man/createFeatureEngineeringSettings.Rd index 0b7a0a8d..9772a9a1 100644 --- a/man/createFeatureEngineeringSettings.Rd +++ b/man/createFeatureEngineeringSettings.Rd @@ -8,7 +8,7 @@ createFeatureEngineeringSettings(type = "none") } \arguments{ \item{type}{(character) Choice of: \itemize{ -\item'none' No feature engineering - this is the default +\item'none' No feature engineering - this is the default }} } \value{ diff --git a/man/setAdaBoost.Rd b/man/setAdaBoost.Rd index adcc4f6f..a13762b2 100644 --- 
a/man/setAdaBoost.Rd +++ b/man/setAdaBoost.Rd @@ -7,7 +7,7 @@ setAdaBoost( nEstimators = list(10, 50, 200), learningRate = list(1, 0.5, 0.1), - algorithm = list("SAMME.R"), + algorithm = list("SAMME"), seed = sample(1000000, 1) ) } @@ -17,7 +17,7 @@ setAdaBoost( \item{learningRate}{(list) Weight applied to each classifier at each boosting iteration. A higher learning rate increases the contribution of each classifier. There is a trade-off between the learningRate and nEstimators parameters There is a trade-off between learningRate and nEstimators.} -\item{algorithm}{(list) If ‘SAMME.R’ then use the SAMME.R real boosting algorithm. base_estimator must support calculation of class probabilities. If ‘SAMME’ then use the SAMME discrete boosting algorithm. The SAMME.R algorithm typically converges faster than SAMME, achieving a lower test error with fewer boosting iterations.} +\item{algorithm}{Only ‘SAMME’ can be provided. The 'algorithm' argument will be deprecated in scikit-learn 1.8.} \item{seed}{A seed for the model} } @@ -26,8 +26,9 @@ Create setting for AdaBoost with python DecisionTreeClassifier base estimator } \examples{ \dontrun{ -model.adaBoost <- setAdaBoost(nEstimators = list(10,50,200), learningRate = list(1, 0.5, 0.1), - algorithm = list('SAMME.R'), seed = sample(1000000,1) - ) +model.adaBoost <- setAdaBoost( + nEstimators = list(10, 50, 200), learningRate = list(1, 0.5, 0.1), + algorithm = list("SAMME"), seed = sample(1000000, 1) +) } } diff --git a/man/setDecisionTree.Rd b/man/setDecisionTree.Rd index 96f56e74..b5633772 100644 --- a/man/setDecisionTree.Rd +++ b/man/setDecisionTree.Rd @@ -2,7 +2,7 @@ % Please edit documentation in R/SklearnClassifierSettings.R \name{setDecisionTree} \alias{setDecisionTree} -\title{Create setting for the scikit-learn 1.0.1 DecisionTree with python} +\title{Create setting for the scikit-learn DecisionTree with python} \usage{ setDecisionTree( criterion = list("gini"), @@ -42,10 +42,10 @@ setDecisionTree( \item{seed}{The 
random state seed} } \description{ -Create setting for the scikit-learn 1.0.1 DecisionTree with python +Create setting for the scikit-learn DecisionTree with python } \examples{ \dontrun{ -model.decisionTree <- setDecisionTree(maxDepth=10,minSamplesLeaf=10, seed=NULL ) +model.decisionTree <- setDecisionTree(maxDepth = 10, minSamplesLeaf = 10, seed = NULL) } } diff --git a/man/setRandomForest.Rd b/man/setRandomForest.Rd index dbe7ad0f..7812e522 100644 --- a/man/setRandomForest.Rd +++ b/man/setRandomForest.Rd @@ -65,7 +65,9 @@ Create setting for random forest model with python (very fast) } \examples{ \dontrun{ -model.rf <- setRandomForest(mtries=list('auto',5,20), ntrees=c(10,100), - maxDepth=c(5,20)) +model.rf <- setRandomForest( + mtries = list("auto", 5, 20), ntrees = c(10, 100), + maxDepth = c(5, 20) +) } } diff --git a/man/setSVM.Rd b/man/setSVM.Rd index f21a202e..0b7af491 100644 --- a/man/setSVM.Rd +++ b/man/setSVM.Rd @@ -43,6 +43,6 @@ Create setting for the python sklearn SVM (SVC function) } \examples{ \dontrun{ -model.svm <- setSVM(kernel='rbf', seed = NULL) +model.svm <- setSVM(kernel = "rbf", seed = NULL) } } diff --git a/tests/testthat/test-sklearnClassifierSettings.R b/tests/testthat/test-sklearnClassifierSettings.R index f71d79dd..ab56a389 100644 --- a/tests/testthat/test-sklearnClassifierSettings.R +++ b/tests/testthat/test-sklearnClassifierSettings.R @@ -20,7 +20,7 @@ test_that("setAdaBoost settings work checks", { expect_equal(attr(adset$param, "settings")$pythonClass, "AdaBoostClassifier") - inputs <- adaBoostClassifierInputs(list, adset$param[[1]]) + inputs <- AdaBoostClassifierInputs(list, adset$param[[1]]) expect_equal( names(inputs), c("n_estimators", "learning_rate", "algorithm", "random_state") @@ -101,7 +101,7 @@ test_that("setNaiveBayes settings work checks", { expect_equal(attr(nbset$param, "settings")$pythonModule, "sklearn.naive_bayes") expect_equal(attr(nbset$param, "settings")$pythonClass, "GaussianNB") - inputs <- 
gaussianNBInputs(list, nbset$param[[1]]) + inputs <- GaussianNBInputs(list, nbset$param[[1]]) expect_equal(names(inputs), NULL) }) @@ -137,7 +137,7 @@ test_that("setRandomForest settings work checks", { expect_equal(attr(rfset$param, "settings")$pythonModule, "sklearn.ensemble") expect_equal(attr(rfset$param, "settings")$pythonClass, "RandomForestClassifier") - inputs <- randomForestClassifierInputs(list, rfset$param[[1]]) + inputs <- RandomForestClassifierInputs(list, rfset$param[[1]]) expect_equal( names(inputs), c(