From 3949478efc9c262c0eb23b588f5d7c4595ba7932 Mon Sep 17 00:00:00 2001
From: Edgar Ruiz
Date: Fri, 22 Dec 2023 16:01:29 -0600
Subject: [PATCH 1/3] Adds databricks_install() and install_environment() tests

---
 tests/testthat/_snaps/python-install.md | 30 ++++++++++++++++++++++
 tests/testthat/test-python-install.R    | 33 +++++++++++++++++++++++++
 2 files changed, 63 insertions(+)

diff --git a/tests/testthat/_snaps/python-install.md b/tests/testthat/_snaps/python-install.md
index 174a4e4..c0e619d 100644
--- a/tests/testthat/_snaps/python-install.md
+++ b/tests/testthat/_snaps/python-install.md
@@ -1,3 +1,33 @@
+# Databricks installation works
+
+    Code
+      out
+    Output
+      $libs
+      [1] "databricks-connect"
+
+      $version
+      [1] "14.1"
+
+      $envname
+      NULL
+
+      $python_version
+      [1] ">=3.9"
+
+      $new_env
+      [1] TRUE
+
+      $method
+      [1] "auto" "virtualenv" "conda"
+
+      $as_job
+      [1] TRUE
+
+      $install_ml
+      [1] FALSE
+
+
 # Install code is correctly created
 
     Code
diff --git a/tests/testthat/test-python-install.R b/tests/testthat/test-python-install.R
index 369253d..2f18a0a 100644
--- a/tests/testthat/test-python-install.R
+++ b/tests/testthat/test-python-install.R
@@ -1,3 +1,36 @@
+test_that("Databricks installation works", {
+  skip_if_not_databricks()
+  local_mocked_bindings(install_as_job = function(...) list(...))
+  out <- install_databricks(version = "14.1")
+  expect_snapshot(out)
+})
+
+test_that("Null version and libs work", {
+  withr::with_envvar(
+    new = c("WORKON_HOME" = use_test_env()),
+    {
+      local_mocked_bindings(py_install = function(...) list(...))
+      expect_message(
+        install_environment(
+          libs = "pyspark",
+          new_env = FALSE,
+          python = Sys.which("python")
+        ),
+        "Retrieving version from PyPi.org"
+      )
+
+      expect_error(
+        install_environment(
+          libs = "pyspark",
+          version = "0.1",
+          new_env = FALSE,
+          python = Sys.which("python")
+        ),
+        "Version '0.1' does not exist"
+      )
+    })
+})
+
 test_that("installed_components() output properly", {
   sc <- use_test_spark_connect()
   expect_message(installed_components())

From 2fa0eac56bf7c2d52a72acea7c3926c235a07dbf Mon Sep 17 00:00:00 2001
From: Edgar Ruiz
Date: Fri, 22 Dec 2023 16:28:09 -0600
Subject: [PATCH 2/3] Adds host and token tests

---
 tests/testthat/test-databricks-utils.R | 41 ++++++++++++++++++++++++++
 1 file changed, 41 insertions(+)

diff --git a/tests/testthat/test-databricks-utils.R b/tests/testthat/test-databricks-utils.R
index 607f1da..9164fc3 100644
--- a/tests/testthat/test-databricks-utils.R
+++ b/tests/testthat/test-databricks-utils.R
@@ -15,10 +15,51 @@ test_that("DBR error code returns as expected", {
 
 test_that("Databricks Host works", {
   expect_true(nchar(databricks_host()) > 5)
+
+  expect_named(databricks_host("thisisatest"), "argument")
+
+  expect_error(
+    withr::with_envvar(
+      new = c("DATABRICKS_HOST" = NA, "DATABRICKS_TOKEN" = NA),
+      {
+        databricks_host()
+      }),
+    "No Host URL was provided"
+  )
+
+  expect_named(
+    withr::with_envvar(
+      new = c("DATABRICKS_HOST" = NA, "CONNECT_DATABRICKS_HOST" = "testing"),
+      {
+        databricks_host()
+      }),
+    "environment_connect"
+  )
+
 })
 
 test_that("Databricks Token works", {
   expect_true(nchar(databricks_token()) > 5)
+
+  expect_named(databricks_token("thisisatest"), "argument")
+
+  expect_error(
+    withr::with_envvar(
+      new = c("DATABRICKS_HOST" = NA, "DATABRICKS_TOKEN" = NA),
+      {
+        databricks_token(fail = TRUE)
+      }),
+    "No authentication token was identified"
+  )
+
+  expect_named(
+    withr::with_envvar(
+      new = c("DATABRICKS_TOKEN" = NA, "CONNECT_DATABRICKS_TOKEN" = "testing"),
+      {
+        databricks_token()
+      }),
+    "environment_connect"
+  )
 })
 
 test_that("Get cluster version", {

From 9e1da93a617a2965bbf4a2002f075d82c96a4b69 Mon Sep 17 00:00:00 2001
From: Edgar Ruiz
Date: Fri, 22 Dec 2023 16:28:21 -0600
Subject: [PATCH 3/3] Adds spark_write_table() test

---
 .gitignore                       | 1 +
 tests/testthat/.gitignore        | 1 +
 tests/testthat/test-data-write.R | 8 ++++++++
 3 files changed, 10 insertions(+)
 create mode 100644 tests/testthat/.gitignore

diff --git a/.gitignore b/.gitignore
index 81bf353..0c9f57d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@
 .Renviron
 .saved_model
 derby.log
+spark-warehouse
diff --git a/tests/testthat/.gitignore b/tests/testthat/.gitignore
new file mode 100644
index 0000000..ba9e3b3
--- /dev/null
+++ b/tests/testthat/.gitignore
@@ -0,0 +1 @@
+spark-warehouse
diff --git a/tests/testthat/test-data-write.R b/tests/testthat/test-data-write.R
index 3977d05..6ed69b7 100644
--- a/tests/testthat/test-data-write.R
+++ b/tests/testthat/test-data-write.R
@@ -1,3 +1,11 @@
+test_that("Write table works", {
+  tbl_mtcars <- use_test_table_mtcars()
+  expect_silent(
+    spark_write_table(tbl_mtcars, "new_mtcars")
+  )
+  dir_delete(test_path("spark-warehouse"))
+})
+
 test_that("CSV works", {
   sc <- use_test_spark_connect()
   tbl_mtcars <- use_test_table_mtcars()