Skip to content

Commit

Permalink
Merge pull request #98 from mlverse/updates
Browse files Browse the repository at this point in the history
Additional testing
  • Loading branch information
edgararuiz authored Dec 22, 2023
2 parents 0d25fcf + 9e1da93 commit cfaffe7
Show file tree
Hide file tree
Showing 6 changed files with 114 additions and 0 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@
.Renviron
.saved_model
derby.log
spark-warehouse
1 change: 1 addition & 0 deletions tests/testthat/.gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
spark-warehouse
30 changes: 30 additions & 0 deletions tests/testthat/_snaps/python-install.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,33 @@
# Databricks installation works

Code
out
Output
$libs
[1] "databricks-connect"
$version
[1] "14.1"
$envname
NULL
$python_version
[1] ">=3.9"
$new_env
[1] TRUE
$method
[1] "auto" "virtualenv" "conda"
$as_job
[1] TRUE
$install_ml
[1] FALSE

# Install code is correctly created

Code
Expand Down
8 changes: 8 additions & 0 deletions tests/testthat/test-data-write.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,11 @@
test_that("Write table works", {
  tbl_mtcars <- use_test_table_mtcars()
  # Register cleanup up front so the on-disk warehouse directory is removed
  # even when the expectation below fails; otherwise a stale
  # "spark-warehouse" dir from a failed run breaks subsequent test runs.
  withr::defer(dir_delete(test_path("spark-warehouse")))
  # Writing the table should complete without messages or warnings.
  expect_silent(
    spark_write_table(tbl_mtcars, "new_mtcars")
  )
})

test_that("CSV works", {
sc <- use_test_spark_connect()
tbl_mtcars <- use_test_table_mtcars()
Expand Down
41 changes: 41 additions & 0 deletions tests/testthat/test-databricks-utils.R
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,51 @@ test_that("DBR error code returns as expected", {

test_that("Databricks Host works", {
  # With a host configured in the ambient test environment, the resolved
  # host string should be non-trivial (longer than 5 characters).
  expect_true(nchar(databricks_host()) > 5)

  # An explicitly supplied host comes back named "argument" — presumably
  # tagging where the value originated; confirm against databricks_host().
  expect_named(databricks_host("thisisatest"), "argument")

  # With both standard env vars unset, resolution must fail with an
  # informative error rather than returning something empty.
  expect_error(
    withr::with_envvar(
      new = c("DATABRICKS_HOST" = NA, "DATABRICKS_TOKEN" = NA),
      {
        databricks_host()
      }),
    "No Host URL was provided"
  )

  # A host sourced from the CONNECT_DATABRICKS_HOST fallback variable is
  # named "environment_connect", identifying which variable supplied it.
  expect_named(
    withr::with_envvar(
      new = c("DATABRICKS_HOST" = NA, "CONNECT_DATABRICKS_HOST" = "testing"),
      {
        databricks_host()
      }),
    "environment_connect"
  )

})

test_that("Databricks Token works", {
  # With a token configured in the ambient test environment, the resolved
  # token should be non-trivial (longer than 5 characters).
  expect_true(nchar(databricks_token()) > 5)

  # An explicitly supplied token comes back named "argument" — presumably
  # tagging where the value originated; confirm against databricks_token().
  expect_named(databricks_token("thisisatest"), "argument")

  # With both standard env vars unset, and fail = TRUE requested, token
  # resolution must raise an informative error.
  expect_error(
    withr::with_envvar(
      new = c("DATABRICKS_HOST" = NA, "DATABRICKS_TOKEN" = NA),
      {
        databricks_token(fail = TRUE)
      }),
    "No authentication token was identified"
  )

  # A token sourced from the CONNECT_DATABRICKS_TOKEN fallback variable is
  # named "environment_connect", identifying which variable supplied it.
  expect_named(
    withr::with_envvar(
      new = c("DATABRICKS_TOKEN" = NA, "CONNECT_DATABRICKS_TOKEN" = "testing"),
      {
        databricks_token()
      }),
    "environment_connect"
  )
})

test_that("Get cluster version", {
Expand Down
33 changes: 33 additions & 0 deletions tests/testthat/test-python-install.R
Original file line number Diff line number Diff line change
@@ -1,3 +1,36 @@
test_that("Databricks installation works", {
  # Only meaningful when a Databricks-enabled test environment is available.
  skip_if_not_databricks()
  # Stub install_as_job() so no real installation runs; the mock simply
  # echoes the arguments it received as a list, which is what gets
  # snapshotted below. The mock must be in place before the call.
  local_mocked_bindings(install_as_job = function(...) list(...))
  out <- install_databricks(version = "14.1")
  # Compare the captured installation arguments against the recorded
  # snapshot (tests/testthat/_snaps/python-install.md).
  expect_snapshot(out)
})

test_that("Null version and libs work", {
  # Point virtualenv creation at a throwaway test directory; restored
  # automatically when with_envvar() exits.
  withr::with_envvar(
    new = c("WORKON_HOME" = use_test_env()),
    {
      # Stub py_install() so nothing is actually installed; the mock just
      # returns its arguments. Must be bound before install_environment().
      local_mocked_bindings(py_install = function(...) list(...))
      # With no version supplied, the latest version is looked up remotely
      # and a progress message is emitted.
      # NOTE(review): this appears to hit PyPi.org over the network — confirm.
      expect_message(
        install_environment(
          libs = "pyspark",
          new_env = FALSE,
          python = Sys.which("python")
        ),
        "Retrieving version from PyPi.org"
      )

      # A version that does not exist must raise an informative error.
      expect_error(
        install_environment(
          libs = "pyspark",
          version = "0.1",
          new_env = FALSE,
          python = Sys.which("python")
        ),
        "Version '0.1' does not exist"
      )
    })
})

test_that("installed_components() output properly", {
sc <- use_test_spark_connect()
expect_message(installed_components())
Expand Down

0 comments on commit cfaffe7

Please sign in to comment.