Skip to content

Commit

Permalink
Adds ml_utils() tests
Browse files Browse the repository at this point in the history
  • Loading branch information
edgararuiz committed Jan 2, 2024
1 parent 177be2f commit 151e488
Show file tree
Hide file tree
Showing 4 changed files with 94 additions and 14 deletions.
21 changes: 21 additions & 0 deletions tests/testthat/_snaps/ml-utils.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# ml_formula() works

Code
ml_formula(am ~ mpg, mtcars)
Output
$label
[1] "am"
$features
[1] "mpg"

# ml_installed() works on simulated interactive session

Code
ml_installed()
Message
! Required Python libraries to run ML functions are missing
Could not find: torch, torcheval, and scikit-learn
Do you wish to install? (This will be a one time operation)

11 changes: 11 additions & 0 deletions tests/testthat/helper-utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -112,3 +112,14 @@ test_databricks_cluster_version <- function() {
}
.test_env$dbr
}

test_databricks_stump_env <- function() {
  # Resolve the name of the Python virtualenv used by the Databricks
  # tests, creating a stub environment on first use, and return the
  # path to that environment's Python interpreter.
  target_env <- use_envname(
    backend = "databricks",
    version = test_databricks_cluster_version()
  )
  # `use_envname()` names its result; anything other than "exact" means
  # the environment does not exist yet, so seed it with a minimal
  # package install to force its creation.
  needs_creation <- names(target_env) != "exact"
  if (needs_creation) {
    py_install("numpy", target_env, pip = TRUE, python = Sys.which("python"))
  }
  path(reticulate::virtualenv_python(target_env))
}
17 changes: 3 additions & 14 deletions tests/testthat/test-deploy.R
Original file line number Diff line number Diff line change
@@ -1,20 +1,9 @@
skip_if_not_databricks()

# Resolves the Databricks test virtualenv and returns the path to its
# Python interpreter, installing a stub package first if the environment
# does not exist yet.
# NOTE(review): this commit removes this helper in favor of the identical
# test_databricks_stump_env() in helper-utils.R.
test_databricks_deploy_env_path <- function() {
env_name <- use_envname(
version = test_databricks_cluster_version(),
backend = "databricks"
)
# A non-"exact" name from use_envname() means the env is missing;
# installing numpy forces virtualenv creation.
if (names(env_name) != "exact") {
py_install("numpy", env_name, pip = TRUE, python = Sys.which("python"))
}
path(reticulate::virtualenv_python(env_name))
}

test_databricks_deploy_output <- function() {
list(
appDir = path(getwd()),
python = test_databricks_deploy_env_path(),
python = test_databricks_stump_env(),
envVars = c("DATABRICKS_HOST", "DATABRICKS_TOKEN"),
server = "my_server",
account = "my_account",
Expand Down Expand Up @@ -44,7 +33,7 @@ test_that("Basic use, passing DBR version works", {
accounts = function(...) accounts_df()
)
# Initializes environment
invisible(test_databricks_deploy_env_path())
invisible(test_databricks_stump_env())

expect_equal(
deploy_databricks(version = test_databricks_cluster_version()),
Expand Down Expand Up @@ -209,7 +198,7 @@ test_that("Rare cases for finding environments works", {
withr::with_envvar(
new = c("WORKON_HOME" = use_test_env()),
{
env_path <- test_databricks_deploy_env_path()
env_path <- test_databricks_stump_env()
local_mocked_bindings(
py_exe = function(...) {
return(NULL)
Expand Down
59 changes: 59 additions & 0 deletions tests/testthat/test-ml-utils.R
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
test_that("ml_formula() works", {
  # Interaction terms (e.g. mpg * cyl) cannot be translated and must abort.
  expect_error(
    ml_formula(am ~ mpg * cyl, mtcars),
    "Formula resulted in an invalid parameter set"
  )
  # A single-feature formula parses into a label/features list; the exact
  # structure is pinned by the snapshot in _snaps/ml-utils.md.
  expect_snapshot(ml_formula(am ~ mpg, mtcars))
})

test_that("snake_to_camel() works", {
  # "var_one" -> "varOne": the underscore is dropped and the letter
  # following it is upper-cased.
  expect_equal(snake_to_camel("var_one"), "varOne")
})

test_that("ml_connect_not_supported() works", {
  # Shared fixture: the full set of arguments Spark Connect rejects.
  blocked_args <- c(
    "elastic_net_param", "reg_param", "threshold",
    "aggregation_depth", "fit_intercept",
    "raw_prediction_col", "uid", "weight_col"
  )

  # No offending arguments supplied -> no condition is signaled.
  expect_silent(
    ml_connect_not_supported(
      args = list(),
      not_supported = blocked_args
    )
  )

  # Supplying an unsupported argument must abort with an informative
  # message. In the original test a misplaced closing paren passed the
  # expected-message string into ml_connect_not_supported() itself, so
  # expect_error() never checked the message; it is now the second
  # argument of expect_error(). The substring deliberately avoids the
  # literal "(s)" from the message, which regex-matching would treat as
  # a capture group and fail to match.
  expect_error(
    ml_connect_not_supported(
      args = list(reg_param = 1),
      not_supported = blocked_args
    ),
    "are not supported by Spark Connect"
  )
})

test_that("ml_installed() works on simulated interactive session", {
# Ensure the Databricks stub virtualenv exists BEFORE swapping
# WORKON_HOME below; order matters here.
test_databricks_stump_env()
# Point virtualenv discovery at a fresh, empty env home so the required
# ML libraries appear missing.
# NOTE(review): helper is use_new_test_env() here but use_test_env() in
# test-deploy.R — confirm both helpers exist and differ intentionally.
withr::with_envvar(
new = c("WORKON_HOME" = use_new_test_env()),
{
# Simulate an interactive RStudio session where the user answers
# "yes" (menu returns 1) to the install prompt, and stub out the
# actual installation so nothing is downloaded.
local_mocked_bindings(
check_interactive = function(...) TRUE,
check_rstudio = function(...) TRUE,
menu = function(...) {
return(1)
},
py_install = function(...) invisible()
)
# Snapshot pins the "missing libraries" prompt message
# (see _snaps/ml-utils.md).
expect_snapshot(
ml_installed()
)
}
)
})

0 comments on commit 151e488

Please sign in to comment.