Select latest envname if match not available, with a bunch of messages
edgararuiz committed Sep 29, 2023
1 parent 95d05d4 commit 0b84a12
Showing 3 changed files with 56 additions and 11 deletions.
37 changes: 28 additions & 9 deletions R/import-check.R
@@ -29,12 +29,31 @@ import_check <- function(x, envname) {
env_loaded <- env_python(envname) == py_exe()
}

inst <- paste0(
" {.run pysparklyr::install_pyspark(",
"envname = \"{envname}\")}"
)
inst <- NULL

if(substr(envname, 1, 22) == "r-sparklyr-databricks-") {
inst <- paste0(
" {.run pysparklyr::install_databricks(",
"envname = \"{envname}\")}"
)
}

if(substr(envname, 1, 19) == "r-sparklyr-pyspark-") {
inst <- paste0(
" {.run pysparklyr::install_pyspark(",
"envname = \"{envname}\")}"
)
}

msg_install <- NULL
msg_restart <- NULL
if(!is.null(inst)) {
# msg_install <- paste("{.header - Use} ", inst, "{.header to install.}")
# msg_restart <- paste("- Restart your R session, and run:", inst)
}

if (inherits(out, "try-error")) {
cli_alert_danger(glue("`reticulate` error:\n {out[[1]]}"))
if (env_found) {
if (env_loaded) {
# found & loaded
@@ -43,8 +62,8 @@
"{.emph '{x}' }{.header is not available in the }",
"{.emph '{envname}' }{.header Python environment.}"
),
paste("{.header - Use}", inst, "{.header to install.}")
))
msg_install
), call = NULL)
} else {
cli_abort(c(
"{.emph '{x}' }{.header is not available current Python environment.}",
@@ -56,14 +75,14 @@
"{.header - Restart your R session, and avoid",
" initializing Python before using} {.emph '{x}'}"
)
))
), call = NULL)
}
} else {
cli_abort(c(
"{.emph '{x}' }{.header is not available current Python environment.}",
paste("- The {.emph '{envname}'} Python environment is not installed."),
paste("- Restart your R session, and run:", inst)
))
msg_restart
), call = NULL)
}
} else {
if (env_loaded) {
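
As a standalone illustration of the prefix dispatch introduced in this file (not part of the commit; pick_install_hint() is a hypothetical helper written only for this sketch), the install hint is chosen from the environment-name prefix and stays NULL when no prefix matches:

# Hypothetical helper mirroring the substr() checks above: the install
# hint depends on which pysparklyr environment prefix `envname` carries.
pick_install_hint <- function(envname) {
  if (startsWith(envname, "r-sparklyr-databricks-")) {
    return(" {.run pysparklyr::install_databricks(envname = \"{envname}\")}")
  }
  if (startsWith(envname, "r-sparklyr-pyspark-")) {
    return(" {.run pysparklyr::install_pyspark(envname = \"{envname}\")}")
  }
  NULL
}

pick_install_hint("r-sparklyr-databricks-13.1")  # Databricks install hint
pick_install_hint("r-sparklyr-pyspark-3.4")      # PySpark install hint
pick_install_hint("other-env")                   # NULL, so no hint is built
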
2 changes: 1 addition & 1 deletion R/install.R
@@ -107,7 +107,7 @@ install_environment <- function(
ver_name <- version_prep(version)

if(version == ver_name) {
version <- paste(version, ".*")
version <- paste0(version, ".*")
}

if (is.null(envname)) {
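
For context on the one-character fix above (an aside, not part of the diff): paste() separates its arguments with a space, while paste0() concatenates them directly, which is what a wildcard version constraint needs. A minimal sketch, using an example value:

version <- "3.4"          # example value, for illustration only
paste(version, ".*")      # "3.4 .*" -- stray space in the constraint
paste0(version, ".*")     # "3.4.*"  -- the intended wildcard version
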
28 changes: 27 additions & 1 deletion R/spark-connect.R
@@ -81,7 +81,23 @@ py_spark_connect <- function(master,
dbr <- version_prep(dbr_version)
}

envname <- glue("r-sparklyr-databricks-{dbr}")
env_base <- "r-sparklyr-databricks-"
envname <- glue("{env_base}{dbr}")
envs <- find_environments(env_base)
matched <- envs[envs == envname]
if(length(matched) == 0) {
envname <- envs[[1]]
cli_div(theme = cli_colors())
cli_alert_warning(paste(
"{.header A Python environment with a matching version was not found}",
"* {.header Will attempt connecting using }{.emph '{envname}'}",
paste0("* {.header To install the proper Python environment use:}",
" {.run pysparklyr::install_databricks(version = \"{dbr}\")}"
),
sep = "\n"
))
cli_end()
}

db <- import_check("databricks.connect", envname)
remote <- db$DatabricksSession$builder$remote(
@@ -285,3 +301,13 @@ cluster_dbr_info <- function(cluster_id,
req_perform() %>%
resp_body_json()
}


find_environments <- function(x) {
conda_names <- conda_list()$name
ve_names <- virtualenv_list()
all_names <- c(ve_names, conda_names)
sub_names <- substr(all_names, 1, nchar(x))
matched <- all_names[sub_names == x]
sort(matched, decreasing = FALSE)
}
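
A rough usage sketch of the new fallback (the environment names below are invented for illustration): when no installed environment matches the requested DBR version exactly, the connection falls back to the first name returned by find_environments(), which sorts its matches in ascending order, and the warning points the user to pysparklyr::install_databricks().

# Invented example: two Databricks environments are installed.
envs <- sort(c("r-sparklyr-databricks-14.0", "r-sparklyr-databricks-13.1"))
dbr <- "14.1"                              # requested, but not installed
envname <- paste0("r-sparklyr-databricks-", dbr)
matched <- envs[envs == envname]
if (length(matched) == 0) {
  envname <- envs[[1]]                     # fall back to the first candidate
}
envname                                    # "r-sparklyr-databricks-13.1"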
