
Commit

Merge pull request #30 from mlverse/updates
CRAN feedback updates
edgararuiz authored Oct 21, 2024
2 parents d046e9f + e77e58e commit 4d4422b
Showing 34 changed files with 63 additions and 89 deletions.
1 change: 1 addition & 0 deletions .github/workflows/R-CMD-check.yaml
@@ -29,6 +29,7 @@ jobs:
    env:
      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
      R_KEEP_PKG_SOURCE: yes
+      _R_CHECK_DONTTEST_EXAMPLES_: false

    steps:
      - uses: actions/checkout@v4

3 changes: 2 additions & 1 deletion .github/workflows/test-coverage.yaml
@@ -15,7 +15,8 @@ jobs:
    runs-on: ubuntu-latest
    env:
      GITHUB_PAT: ${{ secrets.GITHUB_TOKEN }}
+      _R_CHECK_DONTTEST_EXAMPLES_: false

    steps:
      - uses: actions/checkout@v4

4 changes: 2 additions & 2 deletions python/mall/polars.py
@@ -64,14 +64,14 @@ def use(self, backend="", model="", _cache="_mall_cache", **kwargs):
```{python}
# Additional arguments will be passed 'as-is' to the
# downstream R function in this example, to ollama::chat()
-reviews.llm.use("ollama", "llama3.2", seed = 100, temp = 0.1)
+reviews.llm.use("ollama", "llama3.2", seed = 100, temperature = 0.1)
```
```{python}
# During the Python session, you can change any argument
# individually and it will retain all of previous
# arguments used
-reviews.llm.use(temp = 0.3)
+reviews.llm.use(temperature = 0.3)
```
```{python}

4 changes: 2 additions & 2 deletions r/DESCRIPTION
@@ -1,6 +1,6 @@
Package: mall
-Title: Run multiple 'Large Language Model' predictions against a table, or
-    vectors
+Title: Run Multiple Large Language Model Predictions Against a Table, or
+    Vectors
Version: 0.1.0
Authors@R: c(
    person("Edgar", "Ruiz", , "[email protected]", role = c("aut", "cre")),

2 changes: 1 addition & 1 deletion r/R/llm-classify.R
@@ -19,7 +19,7 @@
#' @returns `llm_classify` returns a `data.frame` or `tbl` object.
#' `llm_vec_classify` returns a vector that is the same length as `x`.
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

2 changes: 1 addition & 1 deletion r/R/llm-custom.R
@@ -10,7 +10,7 @@
#' deterministic, provide the options in a vector. This function will set to
#' `NA` any response not in the options
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

2 changes: 1 addition & 1 deletion r/R/llm-extract.R
@@ -12,7 +12,7 @@
#' new column names, if not, the function will use a sanitized version of
#' the content as the name.
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

2 changes: 1 addition & 1 deletion r/R/llm-sentiment.R
@@ -11,7 +11,7 @@
#' `llm_vec_sentiment` returns a vector that is the same length as `x`.
#'
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

2 changes: 1 addition & 1 deletion r/R/llm-summarize.R
@@ -7,7 +7,7 @@
#' @param max_words The maximum number of words that the LLM should use in the
#' summary. Defaults to 10.
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

2 changes: 1 addition & 1 deletion r/R/llm-translate.R
@@ -7,7 +7,7 @@
#' @inheritParams llm_classify
#' @param language Target language to translate the text to
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

12 changes: 6 additions & 6 deletions r/R/llm-use.R
@@ -13,22 +13,22 @@
#' R session
#' @param .cache The path to save model results, so they can be re-used if
#' the same operation is run again. To turn off, set this argument to an empty
-#' character: `""`. 'It defaults to '_mall_cache'. If this argument is left
+#' character: `""`. It defaults to a temp folder. If this argument is left
#' `NULL` when calling this function, no changes to the path will be made.
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' llm_use("ollama", "llama3.2")
#'
#' # Additional arguments will be passed 'as-is' to the
#' # downstream R function in this example, to ollama::chat()
-#' llm_use("ollama", "llama3.2", seed = 100, temp = 0.1)
+#' llm_use("ollama", "llama3.2", seed = 100, temperature = 0.1)
#'
#' # During the R session, you can change any argument
#' # individually and it will retain all of previous
#' # arguments used
-#' llm_use(temp = 0.3)
+#' llm_use(temperature = 0.3)
#'
#' # Use .cache to modify the target folder for caching
#' llm_use(.cache = "_my_cache")
@@ -79,10 +79,10 @@ llm_use <- function(
  }

  if (.force) {
-    cache <- .cache %||% "_mall_cache"
+    cache <- .cache %||% tempfile("_mall_cache")
    m_defaults_reset()
  } else {
-    cache <- .cache %||% m_defaults_cache() %||% "_mall_cache"
+    cache <- .cache %||% m_defaults_cache() %||% tempfile("_mall_cache")
  }

  backend <- backend %||% m_defaults_backend()

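For context on the new default, here is a minimal, hedged sketch of the fallback that `tempfile("_mall_cache")` provides. The `%||%` null-coalescing helper is defined here only for illustration; base R ships it as of 4.4.0 and rlang exports it for earlier versions.

```r
# Return the left-hand side unless it is NULL (null-coalescing helper).
`%||%` <- function(x, y) if (is.null(x)) y else x

.cache <- NULL  # the caller did not supply a cache path

# With this commit, the default cache points at a per-session temp folder
# instead of a "_mall_cache" directory inside the working directory.
cache <- .cache %||% tempfile("_mall_cache")
cache
#> [1] "/tmp/RtmpXXXXXX/_mall_cache1a2b3c"   (exact path varies by session)
```
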
2 changes: 1 addition & 1 deletion r/R/llm-verify.R
@@ -14,7 +14,7 @@
#' `llm_vec_verify` returns a vector that is the same length as `x`.
#'
#' @examples
-#' \dontrun{
+#' \donttest{
#' library(mall)
#'
#' data("reviews")

14 changes: 13 additions & 1 deletion r/cran-comments.md
@@ -1,4 +1,16 @@
-## New submission
+## Resubmission

+Thank you for the feedback and instructions; I have made the following changes:

+- Updated the Title field to title case

+- Changed all \dontrun{} to \donttest{}

+- Changed default location in llm_use() to use a temp folder

+- Changed the tests to use a temp folder location

+### Original submission text:

This is a new package submission. Run multiple 'Large Language Model'
predictions against a table. The predictions run row-wise over a specified

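As background for the `\donttest{}` switch above: examples wrapped in `\donttest{}` are still installed with the package and can be executed on demand (for example with `R CMD check --run-donttest`), whereas `\dontrun{}` examples are never executed. A minimal roxygen sketch using a hypothetical function, not taken from the package:

```r
#' Add two numbers (hypothetical helper, used only to illustrate \donttest)
#'
#' @examples
#' \donttest{
#' # skipped in a default R CMD check run, but runnable with --run-donttest;
#' # a \dontrun{} block, by contrast, is never executed
#' add_two(1, 2)
#' }
#' @export
add_two <- function(x, y) x + y
```
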
2 changes: 1 addition & 1 deletion r/man/llm_classify.Rd

2 changes: 1 addition & 1 deletion r/man/llm_custom.Rd
2 changes: 1 addition & 1 deletion r/man/llm_extract.Rd
2 changes: 1 addition & 1 deletion r/man/llm_sentiment.Rd
2 changes: 1 addition & 1 deletion r/man/llm_summarize.Rd
2 changes: 1 addition & 1 deletion r/man/llm_translate.Rd
8 changes: 4 additions & 4 deletions r/man/llm_use.Rd
2 changes: 1 addition & 1 deletion r/man/llm_verify.Rd
(Generated documentation files; diffs not shown.)

36 changes: 0 additions & 36 deletions r/tests/testthat/_snaps/zzz-cache.md

This file was deleted.

4 changes: 3 additions & 1 deletion r/tests/testthat/helper-ollama.R
@@ -1,6 +1,8 @@
.mall_test <- new.env()
.mall_test$ollama_present <- FALSE
.mall_test$ollama_checked <- FALSE
+.mall_test$cache_ollama <- tempfile("_ollama_cache")
+.mall_test$cache <- tempfile("_mall_cache")

ollama_is_present <- function() {
  if (.mall_test$ollama_checked) {
@@ -25,7 +27,7 @@ skip_if_no_ollama <- function() {
      seed = 100,
      .silent = TRUE,
      .force = TRUE,
-      .cache = "_ollama_cache"
+      .cache = .mall_test$cache_ollama
    )
  }
}

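A short sketch of the pattern this helper establishes: the temp-folder paths are created once per test session and shared by the test files that follow, so running the tests never writes a cache into the package directory; the commented `llm_use()` call mirrors the ones in the updated tests below.

```r
# Created once in helper-ollama.R; every test file reads from this environment.
.mall_test <- new.env()
.mall_test$cache_ollama <- tempfile("_ollama_cache")
.mall_test$cache <- tempfile("_mall_cache")

# A test then points the backend at the shared, per-session cache, e.g.:
# llm_use("simulate_llm", "echo",
#   .silent = TRUE, .force = TRUE,
#   .cache = .mall_test$cache
# )
```
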
2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-classify.R
@@ -1,6 +1,6 @@
test_that("Classify works", {
  test_text <- "this is a test"
-  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)
  expect_equal(
    llm_vec_classify(test_text, labels = test_text),
    test_text

2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-custom.R
@@ -1,6 +1,6 @@
test_that("Custom works", {
  test_text <- "this is a test"
-  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)
  expect_equal(
    llm_vec_custom(test_text, "this is a test: "),
    test_text

2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-extract.R
@@ -1,5 +1,5 @@
test_that("Extract works", {
-  llm_use("simulate_llm", "prompt", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "prompt", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)

  expect_snapshot(
    llm_vec_extract("toaster", labels = "product")

2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-sentiment.R
@@ -1,5 +1,5 @@
test_that("Sentiment works", {
-  llm_use("simulate_llm", "pipe", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "pipe", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)
  expect_equal(
    llm_vec_sentiment("this is a test|positive"),
    "positive"

2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-summarize.R
@@ -1,6 +1,6 @@
test_that("Summarize works", {
  test_text <- "this is a test"
-  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)
  expect_equal(
    llm_vec_summarize(test_text),
    test_text

2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-translate.R
@@ -1,6 +1,6 @@
test_that("Translate works", {
  test_text <- "this is a test"
-  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)
  expect_equal(
    llm_vec_translate(test_text, language = "other"),
    test_text

2 changes: 1 addition & 1 deletion r/tests/testthat/test-llm-verify.R
@@ -1,6 +1,6 @@
test_that("Verify works", {
  test_text <- "this is a test"
-  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE)
+  llm_use("simulate_llm", "echo", .silent = TRUE, .force = TRUE, .cache = .mall_test$cache)
  expect_equal(
    llm_vec_verify(test_text, "test", yes_no = test_text),
    test_text

2 changes: 1 addition & 1 deletion r/tests/testthat/test-m-backend-prompt.R
@@ -4,7 +4,7 @@ test_that("Prompt handles list()", {
    model = "prompt",
    .silent = TRUE,
    .force = TRUE,
-    .cache = "_prompt_cache"
+    .cache = tempfile("_prompt_cache")
  )
  test_text <- "Custom:{prompt}\n{{x}}"
  expect_equal(

12 changes: 3 additions & 9 deletions r/tests/testthat/test-zzz-cache.R
@@ -1,14 +1,8 @@
-test_that("Cache exists and delete", {
-  if (!fs::dir_exists("_mall_cache")) skip("Missing '_mall_cache' folder")
-  expect_snapshot(fs::dir_ls("_mall_cache", recurse = TRUE))
-  fs::dir_delete("_mall_cache")
-})
-
test_that("Ollama cache exists and delete", {
  skip_if_no_ollama()
  expect_equal(
-    length(fs::dir_ls("_ollama_cache", recurse = TRUE)),
-    53
+    length(fs::dir_ls(.mall_test$cache_ollama, recurse = TRUE)),
+    59
  )
-  fs::dir_delete("_ollama_cache")
+  fs::dir_delete(.mall_test$cache_ollama)
})
