
Commit

Merge pull request #60 from mlverse/updates
Updates
edgararuiz authored Jan 23, 2024
2 parents d1ab41e + f3caae0 commit 057ef71
Showing 12 changed files with 103 additions and 96 deletions.
10 changes: 5 additions & 5 deletions R/app_theme.R
@@ -120,12 +120,12 @@ app_theme_rgb_to_hex <- function(x) {
}

running_as_job <- function(x = NULL) {
if(!is.null(x)) {
if (!is.null(x)) {
ch_env$as_job <- x
} else {
if(is.null(ch_env$as_job)) {
ch_env$as_job <-FALSE
}
} else {
if (is.null(ch_env$as_job)) {
ch_env$as_job <- FALSE
}
}
ch_env$as_job
}
44 changes: 24 additions & 20 deletions R/backend-llamagpt.R
@@ -1,13 +1,14 @@
# -------------------------------- Submit --------------------------------------
#' @export
ch_submit.ch_llamagpt <- function(defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
ch_submit.ch_llamagpt <- function(
defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
if (ui_current_markdown()) {
return(invisible())
}
@@ -46,10 +47,11 @@ ch_submit.ch_llamagpt <- function(defaults,

# ----------------------------- Session ----------------------------------------

ch_llamagpt_session <- function(defaults = chattr_defaults(),
r_file_stream = NULL,
r_file_complete = NULL,
testing = FALSE) {
ch_llamagpt_session <- function(
defaults = chattr_defaults(),
r_file_stream = NULL,
r_file_complete = NULL,
testing = FALSE) {
init_session <- FALSE
if (is.null(ch_env$llamagpt$session)) {
init_session <- TRUE
@@ -80,11 +82,12 @@ ch_llamagpt_prompt <- function(prompt) {
ch_env$llamagpt$session$write_input(prompt)
}

ch_llamagpt_output <- function(stream_to,
stream_file = NULL,
output_file = NULL,
timeout = 1000,
output = NULL) {
ch_llamagpt_output <- function(
stream_to,
stream_file = NULL,
output_file = NULL,
timeout = 1000,
output = NULL) {
all_output <- NULL
stop_stream <- FALSE
timeout <- timeout / 0.01
@@ -140,9 +143,10 @@ ch_llamagpt_args <- function(defaults) {
reduce(c)
}

ch_llamagpt_printout <- function(defaults,
r_file_stream = NULL,
output = NULL) {
ch_llamagpt_printout <- function(
defaults,
r_file_stream = NULL,
output = NULL) {
if (defaults$type == "chat") {
ch_llamagpt_output("chat", r_file_stream)
} else {
9 changes: 5 additions & 4 deletions R/backend-openai-core.R
@@ -102,10 +102,11 @@ openai_stream_ide_delta <- function(x, defaults, testing = FALSE) {
}


openai_stream_file <- function(defaults,
req_body,
r_file_stream,
r_file_complete) {
openai_stream_file <- function(
defaults,
req_body,
r_file_stream,
r_file_complete) {
ch_env$stream <- list()
ch_env$stream$response <- NULL
ret <- NULL
103 changes: 55 additions & 48 deletions R/backend-openai.R
@@ -1,12 +1,13 @@
#' @export
ch_submit.ch_open_ai_chat_completions <- function(defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
ch_submit.ch_open_ai_chat_completions <- function(
defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
ch_submit_open_ai(
defaults = defaults,
prompt = prompt,
@@ -20,14 +21,15 @@ ch_submit.ch_open_ai_chat_completions <- function(defaults,
}

#' @export
ch_submit.ch_open_ai_completions <- function(defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
ch_submit.ch_open_ai_completions <- function(
defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
ch_submit_open_ai(
defaults = defaults,
prompt = prompt,
@@ -41,14 +43,15 @@ ch_submit.ch_open_ai_completions <- function(defaults,
}


ch_submit_open_ai <- function(defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
ch_submit_open_ai <- function(
defaults,
prompt = NULL,
stream = NULL,
prompt_build = TRUE,
preview = FALSE,
r_file_stream = NULL,
r_file_complete = NULL,
...) {
if (ui_current_markdown()) {
return(invisible())
}
@@ -143,20 +146,22 @@ build_header <- function(defaults) {

#--------------------------- Completion ----------------------------------------

openai_completion <- function(defaults,
prompt,
new_prompt,
r_file_stream,
r_file_complete,
stream) {
openai_completion <- function(
defaults,
prompt,
new_prompt,
r_file_stream,
r_file_complete,
stream) {
UseMethod("openai_completion")
}

openai_completion.ch_open_ai_chat_completions <- function(defaults,
prompt,
new_prompt,
r_file_stream,
r_file_complete) {
openai_completion.ch_open_ai_chat_completions <- function(
defaults,
prompt,
new_prompt,
r_file_stream,
r_file_complete) {
req_body <- c(
list(
model = defaults$model,
@@ -184,11 +189,12 @@ openai_completion.ch_open_ai_chat_completions <- function(defaults,
ret
}

openai_completion.ch_open_ai_completions <- function(defaults,
prompt,
new_prompt,
r_file_stream,
r_file_complete) {
openai_completion.ch_open_ai_completions <- function(
defaults,
prompt,
new_prompt,
r_file_stream,
r_file_complete) {
req_body <- c(
list(
model = defaults$model,
@@ -216,11 +222,12 @@ openai_completion.ch_open_ai_completions <- function(defaults,
ret
}

openai_switch <- function(prompt,
req_body,
defaults,
r_file_stream,
r_file_complete) {
openai_switch <- function(
prompt,
req_body,
defaults,
r_file_stream,
r_file_complete) {
ret <- NULL
stream <- defaults$model_arguments$stream %||% FALSE
return_result <- TRUE
@@ -260,18 +267,18 @@ app_init_message.ch_open_ai_chat_completions <- function(defaults) {

app_init_openai <- function(defaults) {
print_provider(defaults)
if(defaults$max_data_files > 0) {
if (defaults$max_data_files > 0) {
cli_alert_warning(
paste0(
"A list of the top {defaults$max_data_files} files will ",
"be sent externally to OpenAI with every request\n",
"To avoid this, set the number of files to be sent to 0 ",
"using {.run chattr::chattr_defaults(max_data_files = 0)}"
)
)
)
}

if(defaults$max_data_frames > 0) {
if (defaults$max_data_frames > 0) {
cli_alert_warning(
paste0(
"A list of the top {defaults$max_data_frames} data.frames ",
3 changes: 1 addition & 2 deletions R/chattr-defaults.R
@@ -77,7 +77,7 @@ chattr_defaults <- function(type = "default",
for (j in seq_along(check_files)) {
td_defaults <- read_yaml(file = check_files[j])
loaded_default <- chattr_defaults_get(type = "default")
if(!is.null(loaded_default)) {
if (!is.null(loaded_default)) {
td_defaults$default <- loaded_default
}
check_defaults <- c("default", type)
@@ -130,7 +130,6 @@ chattr_defaults_get <- function(type = "notebook") {

#' @export
print.ch_model <- function(x, ...) {

cli_div(theme = cli_colors())

cli_h1("chattr")
11 changes: 4 additions & 7 deletions R/chattr-test.R
@@ -2,7 +2,7 @@
#' @inheritParams ch_submit
#' @export
chattr_test <- function(defaults = NULL) {
if(is.null(defaults)) defaults <- chattr_defaults()
if (is.null(defaults)) defaults <- chattr_defaults()
ch_test(defaults)
}

@@ -24,16 +24,15 @@ ch_test.ch_open_ai_completions <- function(defaults = NULL) {
}

ch_test_open_ai <- function(defaults = NULL) {

if(ch_debug_get()) {
if (ch_debug_get()) {
prompt <- "TEST"
out <- "TEST"
} else {
prompt <- "Hi!"
out <- capture.output(chattr(prompt))
}

if(is.null(out)) out <- ""
if (is.null(out)) out <- ""

cli_div(theme = cli_colors())
cli_h3("Testing chattr")
@@ -52,7 +51,7 @@

#' @export
ch_test.ch_llamagpt <- function(defaults = NULL) {
if(ch_debug_get()) {
if (ch_debug_get()) {
error <- ""
x <- TRUE
} else {
@@ -81,5 +80,3 @@ ch_test.ch_llamagpt <- function(defaults = NULL) {
}
invisible()
}


2 changes: 1 addition & 1 deletion R/chattr.R
@@ -7,7 +7,7 @@ chattr <- function(prompt = NULL,
prompt_build = TRUE,
stream = NULL) {
ui <- ui_current()
if(ui == "") ui <- "console"
if (ui == "") ui <- "console"
ret <- ch_submit(
defaults = chattr_defaults(type = ui),
prompt = prompt,
7 changes: 4 additions & 3 deletions R/ide.R
@@ -73,7 +73,9 @@ ide_comment_selection <- function() {
selected <- active_doc$contents[start_row:end_row]
end_size <- nchar(selected[length(selected)])

if(end_size == 0) return("")
if (end_size == 0) {
return("")
}

first_letter <- substr(selected, 1, 1)
commented <- first_letter == "#"
@@ -110,12 +112,11 @@ ide_comment_selection <- function() {
ide_build_prompt <- function(prompt = NULL,
defaults = chattr_defaults(),
preview = FALSE) {

if (is.null(prompt)) {
prompt <- ide_comment_selection()
}

if(prompt == "" && preview) {
if (prompt == "" && preview) {
prompt <- "[Your future prompt goes here]"
}

6 changes: 3 additions & 3 deletions R/utils.R
@@ -38,7 +38,7 @@ print.ch_request <- function(x, ...) {
~ {
cli_div(theme = cli_colors())
cli_li("{.y}: {.val1 {.x}}")
}
}
)
}
print_history(x$prompt)
@@ -91,11 +91,11 @@ cli_colors <- function(envir = parent.frame()) {
span.val0 = list(color = "black"),
span.val1 = list(color = "blue"),
span.val2 = list(color = "darkgreen")
)
)
}

ui_validate <- function(x) {
if(!(x %in% ch_env$valid_uis)) {
if (!(x %in% ch_env$valid_uis)) {
valid <- paste0(ch_env$valid_uis, collapse = ", ")
abort(
paste0("'", x, "' is not a valid type. Acceptable values are: ", valid)
1 change: 0 additions & 1 deletion README.qmd
@@ -17,7 +17,6 @@ format: md
source("utils/toc.R")
toc()
```

```{r}
2 changes: 1 addition & 1 deletion tests/testthat/test-backend-openai.R
@@ -1,4 +1,4 @@
test_that("Init messages work",{
test_that("Init messages work", {
expect_snapshot(chattr_use("gpt35"))
chattr_defaults(max_data_files = 10, max_data_frames = 10)
expect_snapshot(app_init_openai(chattr_defaults()))