diff --git a/.Rbuildignore b/.Rbuildignore index 66fc6947..7fe262b5 100644 --- a/.Rbuildignore +++ b/.Rbuildignore @@ -23,4 +23,5 @@ vignettes/download_functions.Rmd vignettes/epa_download.Rmd vignettes/protected_datasets.Rmd inst/extdata/air.2m +inst/extdata/nasa/token.txt LICENSE.md \ No newline at end of file diff --git a/DESCRIPTION b/DESCRIPTION index 15d36d06..aaa94def 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,6 +1,6 @@ Package: amadeus -Title: Accessing and Analyzing Large-Scale Environmental Data in R -Version: 1.0.3 +Title: Accessing and Analyzing Large-Scale Environmental Data +Version: 1.0.7 Authors@R: c( person(given = "Mitchell", family = "Manware", role = c("aut", "ctb"), comment = c(ORCID = "0009-0003-6440-6106")), person(given = "Insang", family = "Song", role = c("aut", "ctb"), comment = c(ORCID = "0000-0001-8732-3256")), @@ -13,7 +13,18 @@ Maintainer: Kyle Messier Description: Functions are designed to facilitate access to and utility with large scale, publicly available environmental data in R. The package contains functions for downloading raw data files from web URLs (download_data()), processing the raw data files into clean spatial objects (process_covariates()), and extracting values from the spatial data objects at point and polygon locations (calc_covariates()). These functions call a series of source-specific functions which are tailored to each data sources/datasets particular URL structure, data format, and spatial/temporal resolution. The functions are tested, versioned, and open source and open access. For calc_sedc() method details, see Messier, Akita, and Serre (2012) . 
Depends: R (>= 4.1.0) Imports: dplyr, sf, sftime, stats, terra, methods, data.table, httr, rvest, exactextractr, utils, stringi, testthat (>= 3.0.0), parallelly, stars, future, future.apply, tidyr, rlang, nhdplusTools, archive, collapse, Rdpack -Suggests: covr, withr, knitr, rmarkdown, lwgeom, FNN, doRNG, devtools, stringr, tigris +Suggests: + covr, + withr, + knitr, + rmarkdown, + lwgeom, + FNN, + doRNG, + devtools, + stringr, + tigris, + spelling RdMacros: Rdpack Encoding: UTF-8 VignetteBuilder: knitr, rmarkdown @@ -22,5 +33,6 @@ RoxygenNote: 7.3.2 Config/Needs/website: tidyverse/tidytemplate Config/testhat/edition: 3 License: MIT + file LICENSE -URL: https://github.com/NIEHS/amadeus +URL: https://niehs.github.io/amadeus/ BugReports: https://github.com/NIEHS/amadeus/issues +Language: en-US diff --git a/R/calculate_covariates.R b/R/calculate_covariates.R index ce7b4dea..600b5b61 100644 --- a/R/calculate_covariates.R +++ b/R/calculate_covariates.R @@ -37,6 +37,8 @@ #' @return Calculated covariates as a data.frame or SpatVector object #' @author Insang Song #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_covariates( @@ -106,12 +108,14 @@ calc_covariates <- ... ) }, error = function(e) { - print(e) - print(args(what_to_run)) stop( paste0( - "Please refer to the argument list and the error message above ", - "to rectify the error.\n" + e, + "\n", + paste0(deparse(args(what_to_run)), collapse = "\n"), + "\n", + "Please refer to the argument list and ", + "the error message above to rectify the error.\n" ) ) }) @@ -150,6 +154,8 @@ calc_covariates <- #' @importFrom terra merge #' @importFrom methods is #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_koppen_geiger( @@ -307,6 +313,8 @@ calc_koppen_geiger <- #' @importFrom future.apply future_Map #' @importFrom collapse rowbind #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_nlcd( @@ -471,6 +479,8 @@ calc_nlcd <- function( #' @importFrom terra extract #' @importFrom data.table year #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_ecoregion( @@ -598,10 +608,12 @@ calc_ecoregion <- #' @importFrom sf st_as_sf #' @importFrom sf st_drop_geometry #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") #' calc_modis_daily( -#' from = mod06l2_warp, +#' from = mod06l2_warp, # derived from process_modis() example #' locs = locs, #' locs_id = "id", #' radius = 0, @@ -812,6 +824,8 @@ calc_modis_daily <- function( #' @importFrom future.apply future_lapply #' @importFrom parallelly availableWorkers #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") #' locs <- terra::vect(locs, geom = c("lon", "lat"), crs = "EPSG:4326") @@ -1026,6 +1040,8 @@ process_modis_swath, or process_blackmarble.") #' @importFrom data.table month #' @importFrom data.table as.data.table #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_temporal_dummies( @@ -1308,6 +1324,8 @@ The result may not be accurate.\n", #' @importFrom dplyr ungroup #' @importFrom dplyr summarize #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_tri( @@ -1397,10 +1415,12 @@ calc_tri <- function( #' @importFrom terra project #' @importFrom terra intersect #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_nei( -#' from = nei, # derived from process_nei example, +#' from = nei, # derived from process_nei example #' locs = loc, #' locs_id = "id" #' ) @@ -1459,6 +1479,8 @@ calc_nei <- function( #' @importFrom dplyr all_of #' @importFrom stats setNames #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_hms( @@ -1692,6 +1714,8 @@ calc_hms <- function( #' @importFrom terra nlyr #' @importFrom terra crs #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_gmted( @@ -1813,6 +1837,8 @@ calc_gmted <- function( #' @importFrom terra nlyr #' @importFrom terra crs #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_narr( @@ -1906,6 +1932,8 @@ calc_narr <- function( #' @importFrom terra nlyr #' @importFrom terra crs #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_geos( @@ -1983,6 +2011,8 @@ calc_geos <- function( #' @return a data.frame or SpatVector object #' @importFrom methods is #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_sedac_population( @@ -2098,6 +2128,8 @@ calc_sedac_population <- function( #' @importFrom terra linearUnits #' @importFrom methods is #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_sedac_groads( @@ -2212,6 +2244,8 @@ calc_sedac_groads <- function( #' @importFrom terra nlyr #' @importFrom terra crs #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_merra2( @@ -2301,6 +2335,8 @@ calc_merra2 <- function( #' @importFrom terra nlyr #' @importFrom terra crs #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_gridmet( @@ -2386,6 +2422,8 @@ calc_gridmet <- function( #' @importFrom terra nlyr #' @importFrom terra crs #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' calc_terraclimate( @@ -2469,6 +2507,8 @@ calc_terraclimate <- function( #' @importFrom dplyr lag #' @importFrom dplyr select #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) #' terracliamte_covar <- calc_terraclimate( diff --git a/R/download.R b/R/download.R index b35900db..c028a304 100644 --- a/R/download.R +++ b/R/download.R @@ -109,10 +109,16 @@ download_data <- ) }, error = function(e) { - print(e) - print(args(what_to_run)) - stop(paste0("Please refer to the argument list and ", - "the error message above to rectify the error.\n")) + stop( + paste0( + e, + "\n", + paste0(deparse(args(what_to_run)), collapse = "\n"), + "\n", + "Please refer to the argument list and ", + "the error message above to rectify the error.\n" + ) + ) } ) } @@ -1016,7 +1022,7 @@ download_merra2 <- function( identifiers_df <- as.data.frame(identifiers) colnames(identifiers_df) <- c("collection_id", "estd_name", "DOI") if (!all(collection %in% identifiers_df$collection_id)) { - print(identifiers_df) + message(identifiers_df) stop(paste0("Requested collection is not recognized.\n Please refer to the table above to find a proper collection.\n")) } @@ -2181,6 +2187,9 @@ download_koppen_geiger <- function( #' #' \insertRef{article_roman2018vnp46}{amadeus} #' @examples +#' \dontrun{ +#' ## NOTE: Examples are wrapped in `\dontrun{}` to avoid sharing sensitive +#' ## NASA EarthData token information. 
#' # example with MOD09GA product #' download_modis( #' product = "MOD09GA", @@ -2188,8 +2197,7 @@ download_koppen_geiger <- function( #' horizontal_tiles = c(8, 8), #' vertical_tiles = c(4, 4), #' date = c("2024-01-01", "2024-01-01"), -#' nasa_earth_data_token = -#' system.file("extdata", "nasa", "token.txt", package = "amadeus"), +#' nasa_earth_data_token = "./pathtotoken/token.txt", #' directory_to_save = tempdir(), #' acknowledgement = TRUE, #' download = FALSE, # NOTE: download skipped for examples, @@ -2207,8 +2215,7 @@ download_koppen_geiger <- function( #' "extdata", "nasa", "LAADS_query.2024-08-02T12_49.csv", #' package = "amadeus" #' ), -#' nasa_earth_data_token = -#' system.file("extdata", "nasa", "token.txt", package = "amadeus"), +#' nasa_earth_data_token = "./pathtotoken/token.txt", #' directory_to_save = tempdir(), #' acknowledgement = TRUE, #' download = FALSE, # NOTE: download skipped for examples, @@ -2221,13 +2228,13 @@ download_koppen_geiger <- function( #' horizontal_tiles = c(8, 8), #' vertical_tiles = c(4, 4), #' date = c("2024-01-01", "2024-01-01"), -#' nasa_earth_data_token = -#' system.file("extdata", "nasa", "token.txt", package = "amadeus"), +#' nasa_earth_data_token = "./pathtotoken/token.txt", #' directory_to_save = tempdir(), #' acknowledgement = TRUE, #' download = FALSE, # NOTE: download skipped for examples, #' remove_command = TRUE #' ) +#' } # nolint end #' @export download_modis <- function( diff --git a/R/process.R b/R/process.R index 09b5830e..9c656ce6 100644 --- a/R/process.R +++ b/R/process.R @@ -36,6 +36,8 @@ #' covariate type and selections. #' @author Insang Song #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' process_covariates( #' covariate = "narr", @@ -107,12 +109,14 @@ process_covariates <- ... 
) }, error = function(e) { - print(e) - print(args(what_to_run)) stop( paste0( - "Please refer to the argument list and the error message above to ", - "rectify the error.\n" + e, + "\n", + paste0(deparse(args(what_to_run)), collapse = "\n"), + "\n", + "Please refer to the argument list and ", + "the error message above to rectify the error.\n" ) ) }) @@ -213,6 +217,8 @@ process_modis_sds <- #' @importFrom terra tapp #' @importFrom terra is.rotated #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' mod09ga_flatten <- process_flatten_sds( #' path = @@ -297,6 +303,8 @@ the input then flatten it manually.") #' @author Insang Song #' @return a `SpatRaster` object #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' mod09ga_merge <- process_modis_merge( #' path = @@ -437,6 +445,8 @@ process_blackmarble_corners <- #' @importFrom terra crs #' @importFrom terra merge #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' vnp46a2 <- process_blackmarble( #' path = @@ -528,6 +538,8 @@ process_blackmarble <- function( #' @importFrom stars st_warp #' @importFrom stars read_stars #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' mod06l2_warp <- process_modis_warp( #' path = paste0( @@ -607,6 +619,8 @@ process_modis_warp <- #' @importFrom terra values #' @importFrom terra sprc #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' mod06l2_swath <- process_modis_swath( #' path = list.files( @@ -725,6 +739,8 @@ process_modis_swath <- #' @author Insang Song #' @importFrom terra rast #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' kg <- process_koppen_geiger( #' path = "./data/koppen_geiger_data.tif" @@ -769,6 +785,8 @@ process_koppen_geiger <- #' @importFrom tools file_path_sans_ext #' @importFrom terra rast metags #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' nlcd <- process_nlcd( #' path = "./data/", @@ -832,6 +850,8 @@ process_nlcd <- #' @importFrom sf st_read st_crs st_as_sfc st_transform st_intersects st_union #' @importFrom data.table year #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' ecoregion <- process_ecoregion( #' path = "./data/epa_ecoregion.gpkg" @@ -910,6 +930,8 @@ process_ecoregion <- #' @importFrom tidyr pivot_wider #' @importFrom stats setNames #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' tri <- process_tri( #' path = "./data", @@ -1018,6 +1040,8 @@ process_tri <- function( #' @importFrom data.table fread #' @importFrom data.table rbindlist #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' nei <- process_nei( #' path = "./data", @@ -1138,6 +1162,8 @@ process_nei <- function( #' resulting in a long processing time or even a crash if data is too large #' for your computing environment to process. 
#' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' aqs <- process_aqs( #' path = "./data/aqs_daily_example.csv", @@ -1317,6 +1343,8 @@ process_aqs <- #' @return a `SpatRaster` object #' @importFrom terra rast #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' pop <- process_sedac_population( #' path = "./data/sedac_population_example.tif" @@ -1397,6 +1425,8 @@ process_sedac_population <- function( #' @return a `SpatVector` object #' @importFrom terra vect #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' groads <- process_sedac_groads( #' path = "./data/groads_example.shp" @@ -1660,6 +1690,8 @@ process_hms <- function( #' @importFrom terra rast #' @importFrom terra varnames #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' gmted <- process_gmted( #' variable = c("Breakline Emphasis", "7.5 arc-seconds"), @@ -1776,6 +1808,8 @@ process_gmted <- function( #' @importFrom terra subset #' @importFrom stringi stri_pad #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' process_narr( #' date = c("2018-01-01", "2018-01-01"), @@ -2024,6 +2058,8 @@ process_narr <- function( #' @importFrom terra crs #' @importFrom terra subset #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' geos <- process_geos( #' date = c("2024-01-01", "2024-01-10"), @@ -2216,6 +2252,8 @@ process_geos <- #' @importFrom terra crs #' @importFrom terra subset #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' merra2 <- process_merra2( #' date = c("2024-01-01", "2024-01-10"), @@ -2404,6 +2442,8 @@ process_merra2 <- #' @importFrom terra time #' @importFrom terra subset #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' gridmet <- process_gridmet( #' date = c("2023-01-01", "2023-01-10"), @@ -2571,6 +2611,8 @@ process_gridmet <- function( #' @importFrom terra time #' @importFrom terra subset #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' terraclimate <- process_terraclimate( #' date = c("2023-01-01", "2023-01-10"), @@ -2750,6 +2792,8 @@ process_terraclimate <- function( #' @importFrom rlang inject #' @importFrom nhdplusTools get_huc #' @examples +#' ## NOTE: Examples are wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' library(terra) #' getf <- "WBD_National_GDB.gdb" @@ -2845,6 +2889,8 @@ process_huc <- #' @importFrom terra rast #' @importFrom terra metags #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. #' \dontrun{ #' cropscape <- process_cropscape( #' path = "./data/cropscape_example.tif", @@ -2904,6 +2950,8 @@ process_cropscape <- #' @importFrom terra rast #' @importFrom terra metags #' @examples +#' ## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +#' ## amount of data which is not included in the package. 
#' \dontrun{ #' prism <- process_prism( #' path = "./data/PRISM_ppt_stable_4kmM3_202104_nc.nc", diff --git a/README.md b/README.md index 080c6a3b..c7c13336 100644 --- a/README.md +++ b/README.md @@ -4,13 +4,19 @@ [![cov](https://NIEHS.github.io/amadeus/badges/coverage.svg)](https://github.com/NIEHS/amadeus/actions) [![lint](https://github.com/NIEHS/amadeus/actions/workflows/lint.yaml/badge.svg)](https://github.com/NIEHS/amadeus/actions/workflows/lint.yaml) [![pkgdown](https://github.com/NIEHS/amadeus/actions/workflows/pkgdown.yaml/badge.svg)](https://github.com/NIEHS/amadeus/actions/workflows/pkgdown.yaml) -[![Project Status: WIP – Initial development is in progress, but there has not yet been a stable, usable release suitable for the public.](https://www.repostatus.org/badges/latest/wip.svg)](https://www.repostatus.org/#wip) +[![Project Status: Active – The project has reached a stable, usable state and is being actively developed.](https://www.repostatus.org/badges/latest/active.svg)](https://www.repostatus.org/#active) +[![CRAN](https://www.r-pkg.org/badges/version/amadeus?color=blue)](https://cran.r-project.org/package=amadeus) +[![downloads](https://cranlogs.r-pkg.org/badges/grand-total/amadeus)](https://cran.r-project.org/package=amadeus) `amadeus` is **a** **m**ech**a**nism for **d**ata, **e**nvironments, and **u**ser **s**etup for common environmental and climate health datasets in R. `amadeus` has been developed to improve access to and utility with large scale, publicly available environmental data in R. ## Installation -`amadeus` is not yet available from CRAN, but it can be installed with the `devtools`, `remotes`, or `pak` packages. +`amadeus` can be installed from CRAN, or with the `devtools`, `remotes`, or `pak` packages. 
+ +```r +install.packages("amadeus") +``` ```r devtools::install_github("NIEHS/amadeus") diff --git a/man/calc_covariates.Rd b/man/calc_covariates.Rd index ad84c8ca..8627c940 100644 --- a/man/calc_covariates.Rd +++ b/man/calc_covariates.Rd @@ -45,6 +45,8 @@ SpatRaster or SpatVector objects before passing to \code{covariate} argument value is converted to lowercase. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_covariates( diff --git a/man/calc_ecoregion.Rd b/man/calc_ecoregion.Rd index 65930eff..a8ca1851 100644 --- a/man/calc_ecoregion.Rd +++ b/man/calc_ecoregion.Rd @@ -35,6 +35,8 @@ binary (0 = point not in ecoregion; 1 = point in ecoregion) variables for each ecoregion. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_ecoregion( diff --git a/man/calc_geos.Rd b/man/calc_geos.Rd index cf2c6844..70172ad4 100644 --- a/man/calc_geos.Rd +++ b/man/calc_geos.Rd @@ -45,6 +45,8 @@ composition variable column name reflects variable and circular buffer radius. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_geos( diff --git a/man/calc_gmted.Rd b/man/calc_gmted.Rd index b9ae2523..f1aa1990 100644 --- a/man/calc_gmted.Rd +++ b/man/calc_gmted.Rd @@ -45,6 +45,8 @@ resolution of \code{from}, and circular buffer radius (ie. Breakline Emphasis at 7.5 arc-second resolution with 0 meter buffer: breakline_emphasis_r75_0). } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. 
\dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_gmted( diff --git a/man/calc_gridmet.Rd b/man/calc_gridmet.Rd index 9dcfa1d2..b7bd2b9a 100644 --- a/man/calc_gridmet.Rd +++ b/man/calc_gridmet.Rd @@ -43,6 +43,8 @@ object containing \code{locs_id} and gridMET variable. gridMET variable column name reflects the gridMET variable and circular buffer radius. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_gridmet( diff --git a/man/calc_hms.Rd b/man/calc_hms.Rd index b82d116e..f594979b 100644 --- a/man/calc_hms.Rd +++ b/man/calc_hms.Rd @@ -33,6 +33,8 @@ for wildfire smoke plume density inherited from \code{from} (0 = point not covered by wildfire smoke plume; 1 = point covered by wildfire smoke plume). } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_hms( diff --git a/man/calc_koppen_geiger.Rd b/man/calc_koppen_geiger.Rd index 101ced2d..de7e370d 100644 --- a/man/calc_koppen_geiger.Rd +++ b/man/calc_koppen_geiger.Rd @@ -42,6 +42,8 @@ dataset. For more information, see \url{https://www.nature.com/articles/sdata2018214}. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_koppen_geiger( diff --git a/man/calc_lagged.Rd b/man/calc_lagged.Rd index 546bf974..662394c3 100644 --- a/man/calc_lagged.Rd +++ b/man/calc_lagged.Rd @@ -42,6 +42,8 @@ with geometry features of the same name. lag, buffer radius format adopted in \code{calc_setcolumns()}. 
} \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) terracliamte_covar <- calc_terraclimate( diff --git a/man/calc_merra2.Rd b/man/calc_merra2.Rd index 87a09842..422dcc87 100644 --- a/man/calc_merra2.Rd +++ b/man/calc_merra2.Rd @@ -44,6 +44,8 @@ pressure level, and meteorological or atmospheric variable. Variable column name reflects variable and circular buffer radius. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_merra2( diff --git a/man/calc_modis_daily.Rd b/man/calc_modis_daily.Rd index 6c62dcf7..33ee98cc 100644 --- a/man/calc_modis_daily.Rd +++ b/man/calc_modis_daily.Rd @@ -61,10 +61,12 @@ swaths or tiles, so it is strongly recommended to check and pre-filter the file names at users' discretion. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") calc_modis_daily( - from = mod06l2_warp, + from = mod06l2_warp, # derived from process_modis() example locs = locs, locs_id = "id", radius = 0, diff --git a/man/calc_modis_par.Rd b/man/calc_modis_par.Rd index 6531420b..4c190ebb 100644 --- a/man/calc_modis_par.Rd +++ b/man/calc_modis_par.Rd @@ -115,6 +115,8 @@ insufficient tiles. } } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. 
\dontrun{ locs <- data.frame(lon = -78.8277, lat = 35.95013, id = "001") locs <- terra::vect(locs, geom = c("lon", "lat"), crs = "EPSG:4326") diff --git a/man/calc_narr.Rd b/man/calc_narr.Rd index 67c6d665..176243c0 100644 --- a/man/calc_narr.Rd +++ b/man/calc_narr.Rd @@ -44,6 +44,8 @@ meteorological variable. Meteorological variable column name reflects variable and circular buffer radius. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_narr( diff --git a/man/calc_nei.Rd b/man/calc_nei.Rd index 8a749c86..b20cc13b 100644 --- a/man/calc_nei.Rd +++ b/man/calc_nei.Rd @@ -27,10 +27,12 @@ a data.frame or SpatVector object Calculate road emissions covariates } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_nei( - from = nei, # derived from process_nei example, + from = nei, # derived from process_nei example locs = loc, locs_id = "id" ) diff --git a/man/calc_nlcd.Rd b/man/calc_nlcd.Rd index 321ab858..ab5613a1 100644 --- a/man/calc_nlcd.Rd +++ b/man/calc_nlcd.Rd @@ -62,6 +62,8 @@ but uses more memory as it will account for the partial overlap with the buffer. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_nlcd( diff --git a/man/calc_sedac_groads.Rd b/man/calc_sedac_groads.Rd index e2332317..4fcfd686 100644 --- a/man/calc_sedac_groads.Rd +++ b/man/calc_sedac_groads.Rd @@ -50,6 +50,8 @@ Unit is km / sq km. The returned \code{data.frame} object contains a dataset. 
For more information, see \url{https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/metadata}. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_sedac_groads( diff --git a/man/calc_sedac_population.Rd b/man/calc_sedac_population.Rd index 193792e4..08d23956 100644 --- a/man/calc_sedac_population.Rd +++ b/man/calc_sedac_population.Rd @@ -44,6 +44,8 @@ density variable. Population density variable column name reflects spatial resolution of \code{from} and circular buffer radius. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_sedac_population( diff --git a/man/calc_temporal_dummies.Rd b/man/calc_temporal_dummies.Rd index 93a3a8e2..ab6ee652 100644 --- a/man/calc_temporal_dummies.Rd +++ b/man/calc_temporal_dummies.Rd @@ -36,6 +36,8 @@ Calculate temporal dummy covariates at point locations. Returns a value in \code{year}, and month and day of week binary variables. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_temporal_dummies( diff --git a/man/calc_terraclimate.Rd b/man/calc_terraclimate.Rd index c038ae7f..f251e272 100644 --- a/man/calc_terraclimate.Rd +++ b/man/calc_terraclimate.Rd @@ -49,6 +49,8 @@ will contain the year and month in YYYYMM format (ie. January, 2018 = 201801). } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. 
\dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_terraclimate( diff --git a/man/calc_tri.Rd b/man/calc_tri.Rd index 977c99fe..8ea296bb 100644 --- a/man/calc_tri.Rd +++ b/man/calc_tri.Rd @@ -42,6 +42,8 @@ object containing \code{locs_id} and variables for each chemical in U.S. context. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ loc <- data.frame(id = "001", lon = -78.90, lat = 35.97) calc_tri( diff --git a/man/download_modis.Rd b/man/download_modis.Rd index fd357915..68f84ea3 100644 --- a/man/download_modis.Rd +++ b/man/download_modis.Rd @@ -70,6 +70,9 @@ Directory structure looks like input/modis/raw/\{version\}/\{product\}/\{year\}/\{day_of_year\}. } \examples{ +\dontrun{ +## NOTE: Examples are wrapped in `\dontrun{}` to avoid sharing sensitive +## NASA EarthData token information. # example with MOD09GA product download_modis( product = "MOD09GA", @@ -77,8 +80,7 @@ download_modis( horizontal_tiles = c(8, 8), vertical_tiles = c(4, 4), date = c("2024-01-01", "2024-01-01"), - nasa_earth_data_token = - system.file("extdata", "nasa", "token.txt", package = "amadeus"), + nasa_earth_data_token = "./pathtotoken/token.txt", directory_to_save = tempdir(), acknowledgement = TRUE, download = FALSE, # NOTE: download skipped for examples, remove_command = TRUE ) @@ -96,8 +98,7 @@ download_modis( "extdata", "nasa", "LAADS_query.2024-08-02T12_49.csv", package = "amadeus" ), - nasa_earth_data_token = - system.file("extdata", "nasa", "token.txt", package = "amadeus"), + nasa_earth_data_token = "./pathtotoken/token.txt", directory_to_save = tempdir(), acknowledgement = TRUE, download = FALSE, # NOTE: download skipped for examples, remove_command = TRUE ) @@ -110,14 +111,14 @@ download_modis( horizontal_tiles = c(8, 8), vertical_tiles = c(4, 4), date = c("2024-01-01", "2024-01-01"), - nasa_earth_data_token = - system.file("extdata", "nasa", "token.txt", package = "amadeus"), + 
nasa_earth_data_token = "./pathtotoken/token.txt", directory_to_save = tempdir(), acknowledgement = TRUE, download = FALSE, # NOTE: download skipped for examples, remove_command = TRUE ) } +} \references{ \insertRef{data_mcd19a22021}{amadeus} diff --git a/man/process_aqs.Rd b/man/process_aqs.Rd index b5ff58d4..1c5475cb 100644 --- a/man/process_aqs.Rd +++ b/man/process_aqs.Rd @@ -58,6 +58,8 @@ resulting in a long processing time or even a crash if data is too large for your computing environment to process. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ aqs <- process_aqs( path = "./data/aqs_daily_example.csv", diff --git a/man/process_blackmarble.Rd b/man/process_blackmarble.Rd index cd30c121..78b8c9b2 100644 --- a/man/process_blackmarble.Rd +++ b/man/process_blackmarble.Rd @@ -39,6 +39,8 @@ georeferenced h5 files of Black Marble product. Referencing corner coordinates are necessary as the original h5 data do not include such information. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ vnp46a2 <- process_blackmarble( path = diff --git a/man/process_covariates.Rd b/man/process_covariates.Rd index 1c582555..8d5f3050 100644 --- a/man/process_covariates.Rd +++ b/man/process_covariates.Rd @@ -34,6 +34,8 @@ operate on the raw data files. To avoid errors, \strong{do not edit the raw data files before passing to \code{process_covariates}}. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ process_covariates( covariate = "narr", diff --git a/man/process_cropscape.Rd b/man/process_cropscape.Rd index cb11ef73..b3ffc332 100644 --- a/man/process_cropscape.Rd +++ b/man/process_cropscape.Rd @@ -26,6 +26,8 @@ returning a single \code{SpatRaster} object. 
Reads CropScape file of selected \code{year}. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ cropscape <- process_cropscape( path = "./data/cropscape_example.tif", diff --git a/man/process_ecoregion.Rd b/man/process_ecoregion.Rd index e69f5bd7..cd5673f3 100644 --- a/man/process_ecoregion.Rd +++ b/man/process_ecoregion.Rd @@ -27,6 +27,8 @@ This fix will ensure that the EPA air quality monitoring sites will be located within the ecoregion. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ ecoregion <- process_ecoregion( path = "./data/epa_ecoregion.gpkg" diff --git a/man/process_flatten_sds.Rd b/man/process_flatten_sds.Rd index 41619482..642a84f3 100644 --- a/man/process_flatten_sds.Rd +++ b/man/process_flatten_sds.Rd @@ -39,6 +39,8 @@ list of sub-datasets in the input file then consult the documentation of MODIS product. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ mod09ga_flatten <- process_flatten_sds( path = diff --git a/man/process_geos.Rd b/man/process_geos.Rd index d4ad60a3..7666e650 100644 --- a/man/process_geos.Rd +++ b/man/process_geos.Rd @@ -36,6 +36,8 @@ Layer names of the returned \code{SpatRaster} object contain the variable, pressure level, date, and hour. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ geos <- process_geos( date = c("2024-01-01", "2024-01-10"), diff --git a/man/process_gmted.Rd b/man/process_gmted.Rd index 0a03a98e..fef0e4a7 100644 --- a/man/process_gmted.Rd +++ b/man/process_gmted.Rd @@ -37,6 +37,8 @@ returning a single \code{SpatRaster} object. of release (2010). 
} \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ gmted <- process_gmted( variable = c("Breakline Emphasis", "7.5 arc-seconds"), diff --git a/man/process_gridmet.Rd b/man/process_gridmet.Rd index 0b17d39b..63a56a7b 100644 --- a/man/process_gridmet.Rd +++ b/man/process_gridmet.Rd @@ -40,6 +40,8 @@ Layer names of the returned \code{SpatRaster} object contain the variable acrony and date. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ gridmet <- process_gridmet( date = c("2023-01-01", "2023-01-10"), diff --git a/man/process_huc.Rd b/man/process_huc.Rd index 19b7d637..1b8a027d 100644 --- a/man/process_huc.Rd +++ b/man/process_huc.Rd @@ -35,6 +35,8 @@ a \code{SpatVector} object Retrieve Hydrologic Unit Code (HUC) data } \examples{ +## NOTE: Examples are wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ library(terra) getf <- "WBD_National_GDB.gdb" diff --git a/man/process_koppen_geiger.Rd b/man/process_koppen_geiger.Rd index 222924fd..07304d41 100644 --- a/man/process_koppen_geiger.Rd +++ b/man/process_koppen_geiger.Rd @@ -23,6 +23,8 @@ The \code{process_koppen_geiger()} function imports and cleans raw climate classification data, returning a single \code{SpatRaster} object. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ kg <- process_koppen_geiger( path = "./data/koppen_geiger_data.tif" diff --git a/man/process_merra2.Rd b/man/process_merra2.Rd index 42cb08f7..e52989db 100644 --- a/man/process_merra2.Rd +++ b/man/process_merra2.Rd @@ -38,6 +38,8 @@ names are taken directly from raw data and are not edited to retain pressure level information. 
} \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ merra2 <- process_merra2( date = c("2024-01-01", "2024-01-10"), diff --git a/man/process_modis_merge.Rd b/man/process_modis_merge.Rd index 50ad0235..6f5a817e 100644 --- a/man/process_modis_merge.Rd +++ b/man/process_modis_merge.Rd @@ -42,6 +42,8 @@ MODIS products downloaded by functions in \code{amadeus}, and \href{https://github.com/rspatial/luna}{luna} are accepted. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ mod09ga_merge <- process_modis_merge( path = diff --git a/man/process_modis_swath.Rd b/man/process_modis_swath.Rd index 9b4a372c..c48ce6aa 100644 --- a/man/process_modis_swath.Rd +++ b/man/process_modis_swath.Rd @@ -50,6 +50,8 @@ Users need to select a subdataset to process. The full path looks like the full path to the hdf file. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ mod06l2_swath <- process_modis_swath( path = list.files( diff --git a/man/process_modis_warp.Rd b/man/process_modis_warp.Rd index 862a38c5..01094cc2 100644 --- a/man/process_modis_warp.Rd +++ b/man/process_modis_warp.Rd @@ -44,6 +44,8 @@ grid points. This function handles one file at a time. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ mod06l2_warp <- process_modis_warp( path = paste0( diff --git a/man/process_narr.Rd b/man/process_narr.Rd index 78685a37..d5e1848c 100644 --- a/man/process_narr.Rd +++ b/man/process_narr.Rd @@ -39,6 +39,8 @@ Layer names of the returned \code{SpatRaster} object contain the variable acrony pressure level, and date. 
} \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ process_narr( date = c("2018-01-01", "2018-01-01"), diff --git a/man/process_nei.Rd b/man/process_nei.Rd index fe8a46bd..a4124fd2 100644 --- a/man/process_nei.Rd +++ b/man/process_nei.Rd @@ -36,6 +36,8 @@ Users should be aware of the coordinate system of census boundary data for other analyses. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ nei <- process_nei( path = "./data", diff --git a/man/process_nlcd.Rd b/man/process_nlcd.Rd index c9d5cb7d..3e76edaa 100644 --- a/man/process_nlcd.Rd +++ b/man/process_nlcd.Rd @@ -26,6 +26,8 @@ returning a single \code{SpatRaster} object. Reads NLCD file of selected \code{year}. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ nlcd <- process_nlcd( path = "./data/", diff --git a/man/process_prism.Rd b/man/process_prism.Rd index a329fa9f..684a75d6 100644 --- a/man/process_prism.Rd +++ b/man/process_prism.Rd @@ -31,6 +31,8 @@ returning a single \code{SpatRaster} object. Reads time series or 30-year normal PRISM data. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ prism <- process_prism( path = "./data/PRISM_ppt_stable_4kmM3_202104_nc.nc", diff --git a/man/process_sedac_groads.Rd b/man/process_sedac_groads.Rd index 21418032..9ead4008 100644 --- a/man/process_sedac_groads.Rd +++ b/man/process_sedac_groads.Rd @@ -27,6 +27,8 @@ U.S. context. The returned \code{SpatVector} object contains a dataset. For more information, see \url{https://sedac.ciesin.columbia.edu/data/set/groads-global-roads-open-access-v1/metadata}. 
} \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ groads <- process_sedac_groads( path = "./data/groads_example.shp" diff --git a/man/process_sedac_population.Rd b/man/process_sedac_population.Rd index 1eeb3d0e..a3134a1f 100644 --- a/man/process_sedac_population.Rd +++ b/man/process_sedac_population.Rd @@ -22,6 +22,8 @@ The \code{process_secac_population()} function imports and cleans raw population density data, returning a single \code{SpatRaster} object. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ pop <- process_sedac_population( path = "./data/sedac_population_example.tif" diff --git a/man/process_terraclimate.Rd b/man/process_terraclimate.Rd index bda3d6ca..da289314 100644 --- a/man/process_terraclimate.Rd +++ b/man/process_terraclimate.Rd @@ -42,6 +42,8 @@ TerraClimate data has monthly temporal resolution, so the first day of each mont is used as a placeholder temporal value. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. \dontrun{ terraclimate <- process_terraclimate( date = c("2023-01-01", "2023-01-10"), diff --git a/man/process_tri.Rd b/man/process_tri.Rd index 59f7651f..78b52313 100644 --- a/man/process_tri.Rd +++ b/man/process_tri.Rd @@ -37,6 +37,8 @@ Visit \href{https://www.epa.gov/toxics-release-inventory-tri-program/tri-data-an to view the available years and variables. } \examples{ +## NOTE: Example is wrapped in `\dontrun{}` as function requires a large +## amount of data which is not included in the package. 
\dontrun{ tri <- process_tri( path = "./data", diff --git a/tests/testthat/test-aqs.R b/tests/testthat/test-aqs.R new file mode 100644 index 00000000..5a767498 --- /dev/null +++ b/tests/testthat/test-aqs.R @@ -0,0 +1,241 @@ +################################################################################ +##### unit and integration tests for U.S. EPA AQS functions + +################################################################################ +##### download_epa +testthat::test_that("download_aqs", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + year_start <- 2018 + year_end <- 2022 + resolution_temporal <- "daily" + parameter_code <- 88101 + directory_to_save <- paste0(tempdir(), "/epa/") + # run download function + download_data(dataset_name = "aqs", + year = c(year_start, year_end), + directory_to_save = directory_to_save, + acknowledgement = TRUE, + unzip = FALSE, + remove_zip = FALSE, + download = FALSE, + remove_command = FALSE) + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # define file path with commands + commands_path <- + paste0( + download_sanitize_path(directory_to_save), + "aqs_", + parameter_code, + "_", + year_start, "_", year_end, + "_", + resolution_temporal, + "_curl_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 4) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_aqs +testthat::test_that("process_aqs", 
{ + withr::local_package("terra") + withr::local_package("data.table") + withr::local_package("sf") + withr::local_package("dplyr") + withr::local_options(list(sf_use_s2 = FALSE)) + + aqssub <- testthat::test_path( + "..", + "testdata", + "aqs_daily_88101_triangle.csv" + ) + testd <- testthat::test_path( + "..", "testdata" + ) + + # main test + testthat::expect_no_error( + aqsft <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "date-location", + return_format = "terra" + ) + ) + testthat::expect_no_error( + aqsst <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "available-data", + return_format = "terra" + ) + ) + testthat::expect_no_error( + aqslt <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "location", + return_format = "terra" + ) + ) + + # expect + testthat::expect_s4_class(aqsft, "SpatVector") + testthat::expect_s4_class(aqsst, "SpatVector") + testthat::expect_s4_class(aqslt, "SpatVector") + + testthat::expect_no_error( + aqsfs <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "date-location", + return_format = "sf" + ) + ) + testthat::expect_no_error( + aqsss <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "available-data", + return_format = "sf" + ) + ) + testthat::expect_no_error( + aqsls <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "location", + return_format = "sf" + ) + ) + testthat::expect_s3_class(aqsfs, "sf") + testthat::expect_s3_class(aqsss, "sf") + testthat::expect_s3_class(aqsls, "sf") + + testthat::expect_no_error( + aqsfd <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "date-location", + return_format = "data.table" + ) + ) + testthat::expect_no_error( + aqssd <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "available-data", + return_format = "data.table" + ) + ) + 
testthat::expect_no_error( + aqssdd <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "available-data", + data_field = "Arithmetic.Mean", + return_format = "data.table" + ) + ) + testthat::expect_no_error( + aqsld <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "location", + return_format = "data.table" + ) + ) + testthat::expect_no_error( + aqsldd <- process_aqs( + path = aqssub, + date = c("2022-02-04", "2022-02-28"), + mode = "location", + data_field = "Arithmetic.Mean", + return_format = "data.table" + ) + ) + testthat::expect_s3_class(aqsfd, "data.table") + testthat::expect_s3_class(aqssd, "data.table") + testthat::expect_s3_class(aqssdd, "data.table") + testthat::expect_s3_class(aqsld, "data.table") + testthat::expect_s3_class(aqsldd, "data.table") + + testthat::expect_no_error( + aqssf <- process_aqs( + path = testd, + date = c("2022-02-04", "2022-02-28"), + mode = "location", + return_format = "sf" + ) + ) + + tempd <- tempdir() + testthat::expect_error( + process_aqs( + path = tempd, + date = c("2022-02-04", "2022-02-28"), + return_format = "sf" + ) + ) + + # expect + testthat::expect_s3_class(aqssf, "sf") + + # error cases + testthat::expect_error( + process_aqs(testthat::test_path("../testdata", "modis")) + ) + testthat::expect_error( + process_aqs(path = 1L) + ) + testthat::expect_error( + process_aqs(path = aqssub, date = c("January", "Januar")) + ) + testthat::expect_error( + process_aqs(path = aqssub, date = c("2021-08-15")) + ) + testthat::expect_error( + process_aqs(path = aqssub, date = NULL) + ) + testthat::expect_no_error( + process_aqs( + path = aqssub, date = c("2022-02-04", "2022-02-28"), + mode = "available-data", return_format = "sf", + extent = c(-79, 33, -78, 36) + ) + ) + testthat::expect_no_error( + process_aqs( + path = aqssub, date = c("2022-02-04", "2022-02-28"), + mode = "available-data", return_format = "sf", + extent = c(-79, 33, -78, 36) + ) + ) + 
testthat::expect_warning( + process_aqs( + path = aqssub, date = c("2022-02-04", "2022-02-28"), + mode = "available-data", return_format = "data.table", + extent = c(-79, -78, 33, 36) + ), + "Extent is not applicable for data.table. Returning data.table..." + ) +}) diff --git a/tests/testthat/test-calc.R b/tests/testthat/test-calc.R new file mode 100644 index 00000000..3b950d48 --- /dev/null +++ b/tests/testthat/test-calc.R @@ -0,0 +1,274 @@ +################################################################################ +##### unit and integration tests for calc_covariates and auxiliary functions + +################################################################################ +##### calc_covariates +testthat::test_that("calc_covariates (expected errors)", { + withr::local_package("rlang") + withr::local_package("terra") + withr::local_package("sf") + withr::local_options(list(sf_use_s2 = FALSE)) + + candidates <- + c("modis", "koppen-geiger", + "koeppen-geiger", "koppen", "koeppen", + "geos", "dummies", "gmted", + "sedac_groads", "groads", "roads", + "ecoregions", "ecoregion", "hms", "smoke", + "gmted", "narr", "geos", + "sedac_population", "population", "nlcd", + "merra", "MERRA", "merra2", "MERRA2", + "tri", "nei", "prism", "huc", "cdl") + for (cand in candidates) { + testthat::expect_error( + calc_covariates(covariate = cand) + ) + } +}) + +testthat::test_that("calc_covariates (no errors)", { + withr::local_package("rlang") + withr::local_package("terra") + withr::local_package("sf") + withr::local_options(list(sf_use_s2 = FALSE)) + + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + ncp$time <- 2018 + ncpt <- + terra::vect(ncp, geom = c("lon", "lat"), + keepgeom = TRUE, crs = "EPSG:4326") + ncpt$time <- c(2018) + path_tri <- testthat::test_path("..", "testdata", "tri") + + testthat::expect_no_error( + tri_r <- process_tri(path = path_tri, year = 2018) + ) + + testthat::expect_no_error( + tri_c <- calc_covariates( + 
covariate = "tri", + from = tri_r, + locs = ncpt, + radius = 50000L + ) + ) + testthat::expect_true(is.data.frame(tri_c)) + + candidates <- + c("modis", "koppen-geiger", + "koeppen-geiger", "koppen", "koeppen", + "geos", "dummies", "gmted", + "sedac_groads", "groads", "roads", + "ecoregions", "ecoregion", "hms", "smoke", + "gmted", "narr", "geos", + "sedac_population", "population", "nlcd", + "merra", "merra2", + "gridmet", "terraclimate", + "tri", "nei") + for (cand in candidates) { + testthat::expect_error( + calc_covariates(covariate = cand) + ) + } +}) + +################################################################################ +##### calc_lagged +testthat::test_that("calc_lagged (geom = FALSE)", { + withr::local_package("terra") + withr::local_package("data.table") + lags <- c(0, 1, 2) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + testthat::expect_true( + is.function(calc_lagged) + ) + for (l in seq_along(lags)) { + narr <- + process_narr( + date = c("2018-01-01", "2018-01-10"), + variable = "weasd", + path = + testthat::test_path( + "..", + "testdata", + "narr", + "weasd" + ) + ) + narr_covariate <- + calc_narr( + from = narr, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean" + ) + # set column names + narr_covariate <- calc_setcolumns( + from = narr_covariate, + lag = 0, + dataset = "narr", + locs_id = "site_id" + ) + # expect identical if lag = 0 + if (lags[l] == 0) { + narr_lagged <- calc_lagged( + from = narr_covariate, + date = c("2018-01-01", "2018-01-10"), + lag = lags[l], + locs_id = "site_id", + time_id = "time" + ) + testthat::expect_identical(narr_lagged, narr_covariate) + } else { + # expect error because 2018-01-01 will not have lag data from 2017-12-31 + testthat::expect_error( + calc_lagged( + from = narr_covariate, + date = c("2018-01-01", "2018-01-10"), + lag = lags[l], + locs_id = "site_id", + time_id = "time" + ) + ) + narr_lagged <- calc_lagged( + from = 
narr_covariate, + date = c("2018-01-05", "2018-01-10"), + lag = lags[l], + locs_id = "site_id", + time_id = "time" + ) + # expect output is data.frame + testthat::expect_true( + class(narr_lagged) == "data.frame" + ) + # expect lag day + testthat::expect_true(grepl("_[0-9]{1}$", colnames(narr_lagged)[3])) + # expect no NA + testthat::expect_true(all(!is.na(narr_lagged))) + } + } +}) + +testthat::test_that("calc_lagged (geom = TRUE)", { + withr::local_package("terra") + withr::local_package("data.table") + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + testthat::expect_true( + is.function(calc_lagged) + ) + narr <- process_narr( + date = c("2018-01-01", "2018-01-10"), + variable = "weasd", + path = + testthat::test_path( + "..", + "testdata", + "narr", + "weasd" + ) + ) + narr_covariate <- + calc_narr( + from = narr, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean" + ) + # set column names + narr_covariate <- calc_setcolumns( + from = narr_covariate, + lag = 0, + dataset = "narr", + locs_id = "site_id" + ) + + # expect error with geom = TRUE and locs as data.frame + testthat::expect_error( + calc_lagged( + from = narr_covariate, + date = c("2018-01-02", "2018-01-04"), + lag = 1, + geom = TRUE + ) + ) +}) + +################################################################################ +##### calc_check_time +testthat::test_that("calc_check_time", { + testthat::expect_error( + # provide integer instead of data.frame to provoke error + calc_check_time(12, TRUE) + ) + testthat::expect_message( + # provide data.frame without time to provoke message + calc_check_time( + data.frame(x = 10, y = 20), + TRUE + ) + ) +}) + +################################################################################ +##### calc_message +testthat::test_that("calc_message", + { + testthat::expect_no_error( + calc_message("gmted", "mean", "2020", "year", NULL) + ) + testthat::expect_no_error( + 
calc_message("narr", "shum", 2000, "year", NULL) + ) + } +) + +################################################################################ +##### calc_time +testthat::test_that("calc_time", { + testthat::expect_no_error( + rr <- calc_time("eternal", "timeless") + ) + testthat::expect_true(rr == "eternal") +}) + +################################################################################ +##### calc_worker +testthat::test_that("calc_worker", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_package("exactextractr") + withr::local_options(sf_use_s2 = FALSE) + + ncp <- data.frame(lon = -78.8277, lat = 35.95013, time = "boundless") + ncp$site_id <- "3799900018810101" + ncpt <- + terra::vect(ncp, geom = c("lon", "lat"), + keepgeom = TRUE, crs = "EPSG:4326") + nc <- system.file("gpkg/nc.gpkg", package = "sf") + nc <- terra::vect(nc) + nc <- terra::project(nc, "EPSG:4326") + ncrast <- terra::rast(nc, resolution = 0.05) + terra::values(ncrast) <- rgamma(terra::ncell(ncrast), 1, 1e-4) + + testthat::expect_no_error( + cwres <- + calc_worker( + from = ncrast, + dataset = "whatever", + locs_vector = ncpt, + locs_df = ncp, + time = ncpt$time, + time_type = "timeless", + radius = 1e5, + max_cells = 3e7 + ) + ) + testthat::expect_s3_class(cwres, "data.frame") +}) diff --git a/tests/testthat/test-calculate_covariates.R b/tests/testthat/test-calculate_covariates.R deleted file mode 100644 index dc9c69cf..00000000 --- a/tests/testthat/test-calculate_covariates.R +++ /dev/null @@ -1,1999 +0,0 @@ -## test for calculating covariates -## 1. 
Koppen-Geiger #### -testthat::test_that("calc_koppen_geiger works well", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_options( - list(sf_use_s2 = FALSE) - ) - - site_faux <- - data.frame( - site_id = "37031000188101", - lon = -78.90, - lat = 35.97 - ) - site_faux <- terra::vect(site_faux, crs = "EPSG:4326", keepgeom = TRUE) - kp_path <- testthat::test_path("..", "testdata", "koppen_subset.tif") - - testthat::expect_no_error( - kgras <- process_koppen_geiger(path = kp_path) - ) - - testthat::expect_no_error( - kg_res <- calc_koppen_geiger( - from = kgras, - locs = site_faux - ) - ) - testthat::expect_no_error( - kg_res <- calc_koppen_geiger( - from = kgras, - locs = sf::st_as_sf(site_faux) - ) - ) - # the result is a data frame - testthat::expect_s3_class(kg_res, "data.frame") - # ncol is equal to 7 - testthat::expect_equal(ncol(kg_res), 7) - # should have only one climate zone - testthat::expect_equal(sum(unlist(kg_res[, c(-1, -2)])), 1) - # with included geometry - testthat::expect_no_error( - kg_geom <- calc_koppen_geiger( - from = kgras, - locs = sf::st_as_sf(site_faux), - geom = TRUE - ) - ) - testthat::expect_equal(ncol(kg_geom), 7) - testthat::expect_true("SpatVector" %in% class(kg_geom)) -}) - -## 2. 
Temporal Dummies #### -testthat::test_that("calc_dummies works well", { - - site_faux <- - data.frame( - site_id = "37031000188101", - lon = -78.90, - lat = 35.97, - time = as.POSIXlt("2022-01-01") - ) - - testthat::expect_no_error( - dum_res <- calc_temporal_dummies( - locs = site_faux, - year = seq(2018L, 2022L) - ) - ) - - # the result is a data frame - testthat::expect_s3_class(dum_res, "data.frame") - # ncol is equal to 12 + 5 + 7 + 4 - testthat::expect_equal(ncol(dum_res), 28L) - # should have each of the indicator groups - testthat::expect_equal(sum(unlist(dum_res[, -1:-4])), 3L) - - # with geometry - testthat::expect_no_error( - dum_res_geom <- calc_temporal_dummies( - locs = site_faux, - year = seq(2018L, 2022L), - geom = TRUE - ) - ) - testthat::expect_s4_class(dum_res_geom, "SpatVector") - - # error cases - site_faux_err <- site_faux - colnames(site_faux_err)[4] <- "date" - testthat::expect_error( - dum_res <- calc_temporal_dummies( - locs = site_faux_err - ) - ) - - testthat::expect_error( - dum_res <- calc_temporal_dummies( - locs = as.matrix(site_faux_err) - ) - ) - -}) - -testthat::test_that("calc_temporal_dummies errors.", { - withr::local_package("terra") - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - testthat::expect_error( - calc_temporal_dummies( - ncp - ) - ) - testthat::expect_error( - calc_temporal_dummies( - terra::vect(ncp) - ) - ) -}) - -## 3. 
Ecoregions #### -testthat::test_that("calc_ecoregion works well", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_options(list(sf_use_s2 = FALSE)) - - ecol3 <- testthat::test_path("..", "testdata", "eco_l3_clip.gpkg") - site_faux <- - data.frame( - site_id = "37999109988101", - lon = -77.576, - lat = 39.40, - date = as.Date("2022-01-01") - ) - site_faux <- - terra::vect( - site_faux, - geom = c("lon", "lat"), - keepgeom = TRUE, - crs = "EPSG:4326") - site_faux <- terra::project(site_faux, "EPSG:5070") - - testthat::expect_no_error( - erras <- process_ecoregion(ecol3) - ) - - testthat::expect_no_error( - ecor_res <- calc_ecoregion( - from = erras, - locs = sf::st_as_sf(site_faux), - locs_id = "site_id" - ) - ) - - testthat::expect_no_error( - ecor_res <- calc_ecoregion( - from = erras, - locs = site_faux, - locs_id = "site_id" - ) - ) - - # the result is a data frame - testthat::expect_s3_class(ecor_res, "data.frame") - # ncol is equal to 2 + 5 + 2 + 1 + 1 - testthat::expect_equal(ncol(ecor_res), 4L) - # should have each of the indicator groups - dum_cn <- grep("DUM_", colnames(ecor_res)) - testthat::expect_equal( - sum(unlist(ecor_res[, dum_cn])), 2L - ) - - testthat::expect_no_error( - ecor_geom <- calc_ecoregion( - from = erras, - locs = site_faux, - locs_id = "site_id", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(ecor_geom), 4 - ) - testthat::expect_true( - "SpatVector" %in% class(ecor_geom) - ) -}) - -## 4. 
MODIS-VIIRS #### -testthat::test_that("calc_modis works well.", { - withr::local_package("sf") - withr::local_package("terra") - withr::local_package("stars") - withr::local_package("lwgeom") - withr::local_options( - list( - sf_use_s2 = FALSE, - future.resolve.recursive = 2L - ) - ) - - site_faux <- - data.frame( - site_id = "37999904288101", - lon = -78.87, - lat = 35.8734, - time = as.Date("2021-08-15") - ) - site_faux <- - terra::vect( - site_faux, - geom = c("lon", "lat"), - keepgeom = FALSE, - crs = "EPSG:4326") - - # case 1: standard mod11a1 - path_mod11 <- - testthat::test_path( - "../testdata/modis/", - "MOD11A1.A2021227.h11v05.061.2021228105320.hdf" - ) - testthat::expect_no_error( - base_mod11 <- - process_modis_merge( - path = path_mod11, - date = "2021-08-15", - subdataset = "(LST_)", - fun_agg = "mean" - ) - ) - testthat::expect_s4_class(base_mod11, "SpatRaster") - - testthat::expect_no_error( - suppressWarnings( - calc_mod11 <- - calc_modis_par( - from = path_mod11, - locs = sf::st_as_sf(site_faux), - preprocess = process_modis_merge, - name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), - subdataset = "(LST_)", - nthreads = 1L - ) - ) - ) - testthat::expect_s3_class(calc_mod11, "data.frame") - - # ... 
_add arguments test - aux <- 0L - testthat::expect_no_error( - suppressWarnings( - calc_mod11 <- - calc_modis_par( - from = path_mod11, - locs = sf::st_as_sf(site_faux), - preprocess = process_modis_merge, - package_list_add = c("MASS"), - export_list_add = c("aux"), - name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), - subdataset = "(LST_)", - nthreads = 1L - ) - ) - ) - - # with geometry - testthat::expect_no_error( - suppressWarnings( - calc_mod11_geom <- - calc_modis_par( - from = path_mod11, - locs = sf::st_as_sf(site_faux), - preprocess = process_modis_merge, - package_list_add = c("MASS"), - export_list_add = c("aux"), - name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), - subdataset = "(LST_)", - nthreads = 1L, - geom = TRUE - ) - ) - ) - testthat::expect_s4_class(calc_mod11_geom, "SpatVector") - - # case 2: swath mod06l2 - path_mod06 <- - list.files( - testthat::test_path("..", "testdata/modis"), - "MOD06", - full.names = TRUE - ) - testthat::expect_no_error( - suppressWarnings( - cloud0 <- process_modis_swath( - path = path_mod06, - subdataset = c("Cloud_Fraction_Day"), - date = "2021-08-15" - ) - ) - ) - - testthat::expect_no_error( - suppressWarnings( - calc_mod06 <- - calc_modis_par( - from = path_mod06, - locs = site_faux, - subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), - preprocess = process_modis_swath, - name_covariates = c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), - nthreads = 1 - ) - ) - ) - testthat::expect_s3_class(calc_mod06, "data.frame") - - # with geometry - testthat::expect_no_error( - suppressWarnings( - calc_mod06_geom <- - calc_modis_par( - from = path_mod06, - locs = site_faux, - subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), - preprocess = process_modis_swath, - name_covariates = c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), - nthreads = 1, - geom = TRUE - ) - ) - ) - testthat::expect_s4_class(calc_mod06_geom, "SpatVector") - - # case 3: VIIRS - path_vnp46 <- - list.files( - testthat::test_path("..", "testdata/modis"), 
- "VNP46", - full.names = TRUE - ) - testthat::expect_warning( - base_vnp <- process_blackmarble( - path = path_vnp46, - date = "2018-08-13", - tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)) - ) - ) - - testthat::expect_no_error( - suppressWarnings( - calc_vnp46 <- - calc_modis_par( - from = path_vnp46, - locs = site_faux, - preprocess = process_blackmarble, - name_covariates = c("MOD_NITLT_0_"), - subdataset = 3L, - nthreads = 1, - tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)) - ) - ) - ) - testthat::expect_s3_class(calc_vnp46, "data.frame") - - # with geometry (as SpatVector) - testthat::expect_no_error( - suppressWarnings( - calc_vnp46_geom_v <- - calc_modis_par( - from = path_vnp46, - locs = site_faux, - preprocess = process_blackmarble, - name_covariates = c("MOD_NITLT_0_"), - subdataset = 3L, - nthreads = 1, - tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), - geom = TRUE - ) - ) - ) - testthat::expect_s4_class(calc_vnp46_geom_v, "SpatVector") - - - # with geometry (as sf) - testthat::expect_no_error( - suppressWarnings( - calc_vnp46_geom_sf <- - calc_modis_par( - from = path_vnp46, - locs = sf::st_as_sf(site_faux), - preprocess = process_blackmarble, - name_covariates = c("MOD_NITLT_0_"), - subdataset = 3L, - nthreads = 1, - tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), - geom = TRUE - ) - ) - ) - testthat::expect_s4_class(calc_vnp46_geom_sf, "SpatVector") - - # error cases - testthat::expect_error( - process_modis_merge(path = site_faux) - ) - testthat::expect_error( - process_modis_merge( - path = path_mod11, - date = "2021-08-15", - fun_agg = 3L - ) - ) - testthat::expect_error( - process_modis_merge( - path = path_mod11, - date = "2021~08~15", - fun_agg = "mean" - ) - ) - - site_faux_r <- site_faux - names(site_faux_r)[1] <- "ID" - testthat::expect_error( - calc_modis_daily( - from = rast(nrow = 3, ncol = 3), - date = "2021-08-15", - locs = site_faux_r - ) - ) - testthat::expect_error( - calc_modis_daily( - from 
= rast(nrow = 3, ncol = 3), - date = "2021-08-15", - locs = matrix(c(1, 3, 4, 5), nrow = 2) - ) - ) - testthat::expect_error( - calc_modis_daily( - from = rast(nrow = 3, ncol = 3), - date = "2021-08-15", - locs = sf::st_as_sf(site_faux) - ) - ) - testthat::expect_error( - calc_modis_daily( - from = terra::rast(nrow = 3, ncol = 3, vals = 1:9, names = "a"), - date = "2021-08-15", - locs = array(1:12, dim = c(2, 2, 3)) - ) - ) - site_faux0 <- site_faux - names(site_faux0)[2] <- "date" - testthat::expect_error( - calc_modis_daily( - from = rast(nrow = 3, ncol = 3), - date = "2021-08-15", - locs = sf::st_as_sf(site_faux0) - ) - ) - site_faux2 <- site_faux - #site_faux2[, 4] <- NULL - - path_mcd19 <- - testthat::test_path( - "../testdata/modis/", - "MCD19A2.A2021227.h11v05.061.2023149160635.hdf" - ) - mcd_merge <- - process_modis_merge( - path = path_mcd19, - date = "2021-08-15", - subdataset = "(Optical_Depth)" - ) - - testthat::expect_no_error( - calc_modis_daily( - from = mcd_merge, - date = "2021-08-15", - locs = sf::st_as_sf(site_faux2), - radius = 1000, - name_extracted = "MCD_EXTR_1K_" - ) - ) - - # test calc_modis_daily directly with geometry - testthat::expect_no_error( - calc_mod_geom <- calc_modis_daily( - from = mcd_merge, - date = "2021-08-15", - locs = sf::st_as_sf(site_faux2), - radius = 1000, - name_extracted = "MCD_EXTR_1K_", - geom = TRUE - ) - ) - testthat::expect_s4_class(calc_mod_geom, "SpatVector") - - testthat::expect_error( - calc_modis_par(from = site_faux) - ) - testthat::expect_error( - calc_modis_par(from = path_mod11, product = "MOD11A1", locs = list(1, 2, 3)) - ) - testthat::expect_error( - calc_modis_par( - from = path_vnp46, - locs = site_faux, - preprocess = "fountain", - name_covariates = c("MOD_NITLT_0_", "MOD_K1_"), - subdataset = 3L, - nthreads = 1 - ) - ) - testthat::expect_warning( - calc_modis_par( - from = path_vnp46, - locs = site_faux, - preprocess = process_blackmarble, - name_covariates = c("MOD_NITLT_0_", "MOD_K1_"), - 
subdataset = 3L, - nthreads = 2, - tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)) - ) - ) - testthat::expect_warning( - flushed <- calc_modis_par( - from = path_vnp46, - locs = site_faux, - name_covariates = c("MOD_NITLT_0_"), - preprocess = process_blackmarble, - subdataset = 3L, - nthreads = 1, - radius = c(-1000, 0L) - ) - ) - testthat::expect_s3_class(flushed, "data.frame") - testthat::expect_true(unlist(flushed[, 2]) == -99999) - -}) - -## 5. NLCD #### -testthat::test_that("Check calc_nlcd works", { - withr::local_package("terra") - withr::local_package("exactextractr") - withr::local_package("sf") - withr::local_package("future") - withr::local_package("future.apply") - withr::local_options( - list(sf_use_s2 = FALSE, future.resolve.recursive = 2L) - ) - - point_us1 <- cbind(lon = -114.7, lat = 38.9, site_id = 1) - point_us2 <- cbind(lon = -114, lat = 39, site_id = 2) - point_ak <- cbind(lon = -155.997, lat = 69.3884, site_id = 3) # alaska - point_fr <- cbind(lon = 2.957, lat = 43.976, site_id = 4) # france - eg_data <- rbind(point_us1, point_us2, point_ak, point_fr) |> - as.data.frame() |> - terra::vect(crs = "EPSG:4326") - - path_testdata <- - testthat::test_path( - "..", - "testdata" - ) - # CHECK INPUT (error message) - # -- buf_radius is numeric - testthat::expect_no_error( - nlcdras <- process_nlcd(path = path_testdata) - ) - testthat::expect_s4_class(nlcdras, "SpatRaster") - - testthat::expect_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - radius = "1000"), - "radius is not a numeric." - ) - testthat::expect_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "whatnot", - radius = 1000) - ) - # -- buf_radius has likely value - testthat::expect_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - radius = -3), - "radius has not a likely value." 
- ) - - # -- two modes work properly - testthat::expect_no_error( - calc_nlcd(locs = sf::st_as_sf(eg_data), - from = nlcdras, - mode = "exact", - radius = 1000) - ) - testthat::expect_no_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "terra", - radius = 300) - ) - # -- multicore mode works properly - testthat::expect_no_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "exact", - radius = 1000, - nthreads = 2L) - ) - testthat::expect_no_error( - calc_nlcd(locs = eg_data, - from = nlcdras, - mode = "terra", - radius = 1000, - nthreads = 2L) - ) - - - # -- year is numeric - testthat::expect_error( - process_nlcd(path = path_testdata, year = "2021"), - "year is not a numeric." - ) - # -- year has likely value - testthat::expect_error( - process_nlcd(path = path_testdata, - year = 2032), - "NLCD data not available for this year." - ) - testthat::expect_error( - process_nlcd(path = path_testdata, - year = 1789), - "NLCD data not available for this year." - ) - testthat::expect_error( - calc_nlcd(locs = 12, - locs_id = "site_id", - from = nlcdras) - ) - testthat::expect_error( - calc_nlcd(locs = eg_data, - from = 12) - ) - # -- nlcd_path is not a character - testthat::expect_error( - process_nlcd(path = 3, - year = 2), - "path is not a character." - ) - # -- nlcd_path does not exist - nice_sentence <- "That's one small step for a man, a giant leap for mankind." - testthat::expect_error( - process_nlcd( - path = nice_sentence), - "path does not exist." 
- ) - - # CHECK OUTPUT - year <- 2021 - buf_radius <- 3000 - testthat::expect_no_error( - calc_nlcd( - locs = eg_data, - locs_id = "site_id", - from = nlcdras, - radius = buf_radius - ) - ) - output <- calc_nlcd( - locs = eg_data, - locs_id = "site_id", - radius = buf_radius, - from = nlcdras - ) - # -- returns a data.frame - testthat::expect_equal(class(output)[1], "data.frame") - # nrow(output) == nrow(input) - testthat::expect_equal(nrow(output), 4) - # -- initial names are still in the output data.frame - testthat::expect_true(all(names(eg_data) %in% names(output))) - # -- check the value of some of the points in the US - # the value has changed. What affected this behavior? - testthat::expect_equal( - output$LDU_TEFOR_0_03000[1], 0.8119843, tolerance = 1e-7 - ) - testthat::expect_equal( - output$LDU_TSHRB_0_03000[2], 0.9630467, tolerance = 1e-7 - ) - # -- class fraction rows should sum to 1 - testthat::expect_equal( - unname(rowSums(output[1:2, 3:(ncol(output))])), - rep(1, 2), - tolerance = 1e-7 - ) - # without geometry will have 11 columns - testthat::expect_equal( - ncol(output), 15 - ) - output_geom <- calc_nlcd( - locs = eg_data, - locs_id = "site_id", - radius = buf_radius, - from = nlcdras, - geom = TRUE - ) - # with geometry will have 12 columns - testthat::expect_equal( - ncol(output_geom), 15 - ) - testthat::expect_true( - "SpatVector" %in% class(output_geom) - ) -}) - -## 6. 
NEI #### -testthat::test_that("NEI calculation", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_package("data.table") - withr::local_options(list(sf_use_s2 = FALSE)) - withr::local_seed(202401) - - ncpath <- system.file("gpkg/nc.gpkg", package = "sf") - nc <- terra::vect(ncpath) - nc <- nc[grep("(Orange|Wake|Durham)", nc$NAME), ] - - neipath <- testthat::test_path("..", "testdata", "nei") - - testthat::expect_error( - neiras <- process_nei( - path = neipath, - county = nc, - year = 2017 - ) - ) - - nc$GEOID <- nc$FIPS - testthat::expect_no_error( - neiras <- process_nei( - path = neipath, - county = nc, - year = 2017 - ) - ) - # inspecting calculated results - testthat::expect_true(inherits(neiras, "SpatVector")) - testthat::expect_true(nrow(neiras) == 3) - - # sf case - testthat::expect_no_error( - neires <- process_nei( - path = neipath, - county = sf::st_as_sf(nc), - year = 2017 - ) - ) - testthat::expect_true(inherits(neires, "SpatVector")) - testthat::expect_true(nrow(neires) == 3) - - # error cases - testthat::expect_error( - process_nei(neipath, year = 2017) - ) - testthat::expect_error( - process_nei(neipath, "Orion/Betelgeuse", year = 2017) - ) - testthat::expect_error( - process_nei(neipath, nc, year = 2083) - ) - - # calc_nei - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - ncp$time <- 2018L - ncp <- terra::vect(ncp, keepgeom = TRUE, crs = "EPSG:4326") - nc <- terra::project(nc, "EPSG:4326") - - testthat::expect_no_error( - neicalced <- calc_nei( - locs = ncp, - from = neiras - ) - ) - testthat::expect_true(any(grepl("NEI", names(neicalced)))) - testthat::expect_equal(neicalced$TRF_NEINP_0_00000, 1579079, tolerance = 1) - - # with geometry - testthat::expect_no_error( - neicalced_geom <- calc_nei( - locs = ncp, - from = neiras, - geom = TRUE - ) - ) - testthat::expect_s4_class(neicalced_geom, "SpatVector") - - # more error cases - testthat::expect_condition( - calc_nei( - locs = 
"jittered", - from = neiras - ) - ) - -}) - -## 7. TRI #### -testthat::test_that("TRI calculation", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_package("dplyr") - withr::local_package("tidyr") - withr::local_package("data.table") - withr::local_options(sf_use_s2 = FALSE) - - ncp <- data.frame(lon = c(-78.8277, -78.0000), lat = c(35.95013, 80.000)) - ncp$site_id <- c("3799900018810101", "3799900018819999") - ncp$time <- 2018L - ncpt <- - terra::vect(ncp, geom = c("lon", "lat"), - keepgeom = TRUE, crs = "EPSG:4326") - ncpt$time <- 2018L - path_tri <- testthat::test_path("..", "testdata", "tri") - - testthat::expect_no_error( - tri_r <- process_tri(path = path_tri, year = 2018) - ) - testthat::expect_s4_class(tri_r, "SpatVector") - - testthat::expect_no_error( - tri_c <- calc_tri( - from = tri_r, - locs = ncpt, - radius = c(1500L, 50000L) - ) - ) - testthat::expect_true(is.data.frame(tri_c)) - - # with geometry - testthat::expect_no_error( - tri_c_geom <- calc_tri( - from = tri_r, - locs = ncpt, - radius = c(1500L, 50000L), - geom = TRUE - ) - ) - testthat::expect_s4_class(tri_c_geom, "SpatVector") - - testthat::expect_no_error( - calc_tri( - from = tri_r, - locs = sf::st_as_sf(ncpt), - radius = 50000L - ) - ) - testthat::expect_error( - calc_tri( - from = tempdir(), - locs = ncpt, - radius = 50000L - ) - ) - testthat::expect_error( - calc_tri( - from = paste0(tdir, "/tri/"), - locs = ncpt[, 1:2], - radius = 50000L - ) - ) - testthat::expect_error( - calc_tri( - from = paste0(tdir, "/tri/"), - locs = ncpt, - radius = "As far as the Earth's radius" - ) - ) -}) - -## 8. 
SEDC #### -testthat::test_that("calc_sedc tests", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_package("dplyr") - withr::local_package("tidyr") - withr::local_package("data.table") - withr::local_options(sf_use_s2 = FALSE) - - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - ncp$time <- 2018L - ncpt <- - terra::vect(ncp, geom = c("lon", "lat"), - keepgeom = TRUE, crs = "EPSG:4326") - path_tri <- testthat::test_path("..", "testdata", "tri") - - testthat::expect_no_error( - tri_r <- process_tri(path = path_tri, year = 2018) - ) - tri_r <- terra::project(tri_r, terra::crs(ncpt)) - - targcols <- grep("FUGITIVE_", names(tri_r), value = TRUE) - testthat::expect_no_error( - tri_sedc <- - calc_sedc( - locs = ncpt, - from = tri_r, - locs_id = "site_id", - sedc_bandwidth = 30000, - target_fields = targcols - ) - ) - testthat::expect_s3_class(tri_sedc, "data.frame") - - testthat::expect_no_error( - calc_sedc( - locs = sf::st_as_sf(ncpt), - from = sf::st_as_sf(tri_r), - locs_id = "site_id", - sedc_bandwidth = 30000, - target_fields = targcols - ) - ) - - # with geometry - testthat::expect_no_error( - tri_sedc_geom <- calc_sedc( - locs = ncpt, - from = tri_r, - locs_id = "site_id", - sedc_bandwidth = 30000, - target_fields = targcols, - geom = TRUE - ) - ) - testthat::expect_s4_class(tri_sedc_geom, "SpatVector") - - # warning case: duplicate field names between locs and from - ncpta <- ncpt - ncpta$YEAR <- 2018 - testthat::expect_warning( - calc_sedc( - locs = ncpta, - from = sf::st_as_sf(tri_r), - locs_id = "site_id", - sedc_bandwidth = 30000, - target_fields = targcols - ) - ) - -}) - -## 9. 
HMS #### -testthat::test_that("calc_hms returns expected.", { - withr::local_package("terra") - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_hms) - ) - for (r in seq_along(radii)) { - hms <- - process_hms( - date = c("2022-06-10", "2022-06-11"), - path = testthat::test_path( - "..", - "testdata", - "hms" - ) - ) - hms_covariate <- - calc_hms( - from = hms, - locs = ncp, - locs_id = "site_id", - radius = radii[r], - geom = FALSE - ) - # set column names - hms_covariate <- calc_setcolumns( - from = hms_covariate, - lag = 0, - dataset = "hms", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(hms_covariate) == "data.frame" - ) - # expect 3 columns - expect_true( - ncol(hms_covariate) == 5 - ) - # expect 2 rows - expect_true( - nrow(hms_covariate) == 2 - ) - # expect integer for binary value - expect_true( - is.integer(hms_covariate[, 3]) - ) - # expect binary - expect_true( - all(unique(hms_covariate[, 3]) %in% c(0, 1)) - ) - } -}) - -testthat::test_that("calc_hms with geom = TRUE", { - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - hms_dir <- testthat::test_path( - "..", "testdata", "hms" - ) - hms <- process_hms( - date = c("2022-06-10", "2022-06-13"), - path = hms_dir - ) - hms_covariate_geom <- calc_hms( - from = hms, - locs = ncp, - locs_id = "site_id", - radius = 0, - geom = TRUE - ) - # with geometry will have 5 columns - testthat::expect_equal( - ncol(hms_covariate_geom), 5 - ) - testthat::expect_s4_class( - hms_covariate_geom, "SpatVector" - ) -}) - -testthat::test_that("calc_hms with missing polygons (12/31/2018).", { - withr::local_package("terra") - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_hms) - ) - # expect function - testthat::expect_true( - 
is.function(process_hms) - ) - hms <- - process_hms( - date = c("2018-12-31", "2018-12-31"), - path = testthat::test_path( - "..", - "testdata", - "hms" - ) - ) - for (r in seq_along(radii)) { - hms_covar <- calc_hms( - from = hms, - locs = ncp, - locs_id = "site_id", - radius = radii[r], - geom = FALSE - ) - # data frame - testthat::expect_true(methods::is(hms_covar, "data.frame")) - # 5 columns - testthat::expect_equal(ncol(hms_covar), 7) - } - for (r in seq_along(radii)) { - hms_covar <- calc_hms( - from = hms, - locs = ncp, - locs_id = "site_id", - radius = radii[r], - geom = TRUE - ) - # SpatVector - testthat::expect_true(methods::is(hms_covar, "SpatVector")) - # 5 columns - testthat::expect_equal(ncol(hms_covar), 5) - } -}) - -## 10. GMTED #### -testthat::test_that("calc_gmted returns expected.", { - withr::local_package("terra") - statistics <- c( - "Breakline Emphasis", "Systematic Subsample" - ) - resolutions <- c( - "7.5 arc-seconds", "15 arc-seconds", "30 arc-seconds" - ) - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_gmted) - ) - for (s in seq_along(statistics)) { - statistic <- statistics[s] - for (r in seq_along(resolutions)) { - resolution <- resolutions[r] - for (a in seq_along(radii)) { - gmted <- - process_gmted( - variable = c(statistic, resolution), - path = - testthat::test_path( - "..", - "testdata", - "gmted" - ) - ) - gmted_covariate <- - calc_gmted( - from = gmted, - locs = ncp, - locs_id = "site_id", - radius = radii[a], - fun = "mean" - ) - # set column names - gmted_covariate <- calc_setcolumns( - from = gmted_covariate, - lag = 0, - dataset = "gmted", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(gmted_covariate) == "data.frame" - ) - # expect 2 columns - expect_true( - ncol(gmted_covariate) == 3 - ) - # expect numeric value - expect_true( - class(gmted_covariate[, 3]) == "numeric" - ) - 
} - } - } - testthat::expect_no_error( - gmted <- process_gmted( - variable = c("Breakline Emphasis", "7.5 arc-seconds"), - testthat::test_path( - "..", "testdata", "gmted", "be75_grd" - ) - ) - ) - testthat::expect_no_error( - gmted_geom <- calc_gmted( - gmted, - ncp, - "site_id", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(gmted_geom), 3 - ) - testthat::expect_true( - "SpatVector" %in% class(gmted_geom) - ) -}) - -## 11. NARR #### -testthat::test_that("calc_narr returns expected.", { - withr::local_package("terra") - variables <- c( - "weasd", - "omega" - ) - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_narr) - ) - for (v in seq_along(variables)) { - variable <- variables[v] - for (r in seq_along(radii)) { - narr <- - process_narr( - date = c("2018-01-01", "2018-01-01"), - variable = variable, - path = - testthat::test_path( - "..", - "testdata", - "narr", - variable - ) - ) - narr_covariate <- - calc_narr( - from = narr, - locs = ncp, - locs_id = "site_id", - radius = radii[r], - fun = "mean" - ) - # set column names - narr_covariate <- calc_setcolumns( - from = narr_covariate, - lag = 0, - dataset = "narr", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(narr_covariate) == "data.frame" - ) - if (variable == "weasd") { - # expect 3 columns (no pressure level) - expect_true( - ncol(narr_covariate) == 3 - ) - # expect numeric value - expect_true( - class(narr_covariate[, 3]) == "numeric" - ) - } else { - # expect 4 columns - expect_true( - ncol(narr_covariate) == 4 - ) - # expect numeric value - expect_true( - class(narr_covariate[, 4]) == "numeric" - ) - } - # expect $time is class Date - expect_true( - "POSIXct" %in% class(narr_covariate$time) - ) - } - } - # with geometry - testthat::expect_no_error( - narr_covariate_geom <- calc_narr( - from = narr, - locs = ncp, - locs_id = "site_id", - radius = 0, - 
fun = "mean", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(narr_covariate_geom), 4 # 4 columns because omega has pressure levels - ) - testthat::expect_true( - "SpatVector" %in% class(narr_covariate_geom) - ) -}) - -## 11. GEOS-CF #### -testthat::test_that("calc_geos returns as expected.", { - withr::local_package("terra") - withr::local_package("data.table") - collections <- c( - "a", - "c" - ) - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_geos) - ) - for (c in seq_along(collections)) { - collection <- collections[c] - for (r in seq_along(radii)) { - geos <- - process_geos( - date = c("2018-01-01", "2018-01-01"), - variable = "O3", - path = - testthat::test_path( - "..", - "testdata", - "geos", - collection - ) - ) - geos_covariate <- - calc_geos( - from = geos, - locs = data.table::data.table(ncp), - locs_id = "site_id", - radius = radii[r], - fun = "mean" - ) - # set column names - geos_covariate <- calc_setcolumns( - from = geos_covariate, - lag = 0, - dataset = "geos", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(geos_covariate) == "data.frame" - ) - # expect 4 columns - expect_true( - ncol(geos_covariate) == 4 - ) - # expect numeric value - expect_true( - class(geos_covariate[, 4]) == "numeric" - ) - # expect $time is class POSIXt - expect_true( - "POSIXt" %in% class(geos_covariate$time) - ) - } - } - # with included geometry - testthat::expect_no_error( - geos_covariate_geom <- calc_geos( - from = geos, - locs = ncp, - locs_id = "site_id", - radius = 0, - fun = "mean", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(geos_covariate_geom), 4 - ) - testthat::expect_true( - "SpatVector" %in% class(geos_covariate_geom) - ) -}) - -## 12. 
SEDAC: Population #### -testthat::test_that("calc_sedac_population returns as expected.", { - withr::local_package("terra") - withr::local_package("data.table") - paths <- list.files(testthat::test_path( - "..", "testdata", "population" - ), full.names = TRUE) - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_sedac_population) - ) - for (p in seq_along(paths)) { - path <- paths[p] - for (r in seq_along(radii)) { - pop <- - process_sedac_population( - path = paths - ) - pop_covariate <- - calc_sedac_population( - from = pop, - locs = data.table::data.table(ncp), - locs_id = "site_id", - radius = radii[r], - fun = "mean" - ) - # set column names - pop_covariate <- calc_setcolumns( - from = pop_covariate, - lag = 0, - dataset = "pop", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(pop_covariate) == "data.frame" - ) - # expect 4 columns - expect_true( - ncol(pop_covariate) == 3 - ) - # expect numeric value - expect_true( - class(pop_covariate[, 3]) == "numeric" - ) - # expect $time is class integer for year - expect_true( - "integer" %in% class(pop_covariate$time) - ) - } - } - # with included geometry - testthat::expect_no_error( - pop_covariate_geom <- calc_sedac_population( - from = pop, - locs = ncp, - locs_id = "site_id", - radius = 0, - fun = "mean", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(pop_covariate_geom), 3 - ) - testthat::expect_true( - "SpatVector" %in% class(pop_covariate_geom) - ) -}) - -## 13. 
SEDAC: Global Roads #### -testthat::test_that("groads calculation works", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_options(list(sf_use_s2 = FALSE)) - - # test data generation - ncp <- data.frame( - site_id = c("1", "2"), - lon = c(-78.899, -78.643669), - lat = c(35.8774, 35.785342), - time = c(2022, 2022) - ) - # ncp <- terra::vect(ncp, keepgeom = TRUE, crs = "EPSG:4326") - path_groads <- testthat::test_path("..", "testdata", "groads_test.shp") - groads <- terra::vect(path_groads) - - testthat::expect_no_error( - groads_res <- calc_sedac_groads( - from = groads, - locs = ncp, - locs_id = "site_id", - radius = 5000 - ) - ) - - testthat::expect_error( - calc_sedac_groads( - from = groads, - locs = ncp, - locs_id = "site_id", - radius = 0 - ) - ) - - # expect data.frame - testthat::expect_s3_class(groads_res, "data.frame") - - # return with geometry - testthat::expect_no_error( - groads_geom <- calc_sedac_groads( - from = groads, - locs = ncp, - locs_id = "site_id", - radius = 5000, - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(groads_geom), 4 - ) - testthat::expect_true( - "SpatVector" %in% class(groads_geom) - ) -}) - - -## 14. 
MERRA2 #### -testthat::test_that("calc_merra2 returns as expected.", { - withr::local_package("terra") - withr::local_package("data.table") - #* indicates three dimensional data that has subset to single - #* pressure level for test data set - collections <- c( - "inst1_2d_int_Nx", "inst3_2d_gas_Nx", "inst3_3d_chm_Nv", #* - "inst6_3d_ana_Np", #* - "statD_2d_slv_Nx", "tavg1_2d_chm_Nx", "tavg3_3d_udt_Np" #* - ) - variables <- c( - "CPT", "AODANA", "AIRDENS", #* - "SLP", #* - "HOURNORAIN", "COCL", "DUDTANA" #* - ) - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_merra2) - ) - for (c in seq_along(collections)) { - collection <- collections[c] - variable <- variables[c] - for (r in seq_along(radii)) { - merra2 <- - process_merra2( - date = c("2018-01-01", "2018-01-01"), - variable = variable, - path = - testthat::test_path( - "..", - "testdata", - "merra2", - collection - ) - ) - merra2_covariate <- - calc_merra2( - from = merra2, - locs = data.table::data.table(ncp), - locs_id = "site_id", - radius = radii[r], - fun = "mean" - ) - # set column names - merra2_covariate <- calc_setcolumns( - from = merra2_covariate, - lag = 0, - dataset = "merra2", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(merra2_covariate) == "data.frame" - ) - if (grepl("lev", names(merra2)[1])) { - # expect 4 columns - expect_true( - ncol(merra2_covariate) == 4 - ) - # expect numeric value - expect_true( - class(merra2_covariate[, 4]) == "numeric" - ) - } else { - # expect 3 columns - expect_true( - ncol(merra2_covariate) == 3 - ) - # expect numeric value - expect_true( - class(merra2_covariate[, 3]) == "numeric" - ) - } - # expect $time is class Date - expect_true( - "POSIXt" %in% class(merra2_covariate$time) - ) - } - } - # with included geometry - testthat::expect_no_error( - merra2_covariate_geom <- calc_merra2( - from = merra2, - locs = ncp, - 
locs_id = "site_id", - radius = 0, - fun = "mean", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(merra2_covariate_geom), 4 - ) - testthat::expect_true( - "SpatVector" %in% class(merra2_covariate_geom) - ) -}) - -## 15. GRIDMET #### -testthat::test_that("calc_gridmet returns as expected.", { - withr::local_package("terra") - withr::local_package("data.table") - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_gridmet) - ) - for (r in seq_along(radii)) { - gridmet <- - process_gridmet( - date = c("2018-01-03", "2018-01-03"), - variable = "pr", - path = - testthat::test_path( - "..", - "testdata", - "gridmet", - "pr" - ) - ) - gridmet_covariate <- - calc_gridmet( - from = gridmet, - locs = data.table::data.table(ncp), - locs_id = "site_id", - radius = radii[r], - fun = "mean" - ) - # set column names - gridmet_covariate <- calc_setcolumns( - from = gridmet_covariate, - lag = 0, - dataset = "gridmet", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(gridmet_covariate) == "data.frame" - ) - # expect 3 columns - expect_true( - ncol(gridmet_covariate) == 3 - ) - # expect numeric value - expect_true( - class(gridmet_covariate[, 3]) == "numeric" - ) - # expect $time is class Date - expect_true( - "POSIXt" %in% class(gridmet_covariate$time) - ) - } - # with included geometry - testthat::expect_no_error( - gridmet_covariate_geom <- calc_gridmet( - from = gridmet, - locs = ncp, - locs_id = "site_id", - radius = 0, - fun = "mean", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(gridmet_covariate_geom), 3 - ) - testthat::expect_true( - "SpatVector" %in% class(gridmet_covariate_geom) - ) -}) - -## 16. 
TerraClimate #### -testthat::test_that("calc_terraclimate returns as expected.", { - withr::local_package("terra") - withr::local_package("data.table") - radii <- c(0, 1000) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - expect_true( - is.function(calc_terraclimate) - ) - for (r in seq_along(radii)) { - terraclimate <- - process_terraclimate( - date = c("2018-01-01", "2018-01-01"), - variable = "Precipitation", - path = - testthat::test_path( - "..", - "testdata", - "terraclimate", - "ppt" - ) - ) - terraclimate_covariate <- - calc_terraclimate( - from = terraclimate, - locs = data.table::data.table(ncp), - locs_id = "site_id", - radius = radii[r], - fun = "mean" - ) - # set column names - terraclimate_covariate <- calc_setcolumns( - from = terraclimate_covariate, - lag = 0, - dataset = "terraclimate", - locs_id = "site_id" - ) - # expect output is data.frame - expect_true( - class(terraclimate_covariate) == "data.frame" - ) - # expect 3 columns - expect_true( - ncol(terraclimate_covariate) == 3 - ) - # expect numeric value - expect_true( - class(terraclimate_covariate[, 3]) == "numeric" - ) - # expect date and time column - expect_true( - nchar(terraclimate_covariate$time)[1] == 6 - ) - } - # with included geometry - testthat::expect_no_error( - terraclimate_covariate_geom <- calc_terraclimate( - from = terraclimate, - locs = ncp, - locs_id = "site_id", - radius = 0, - fun = "mean", - geom = TRUE - ) - ) - testthat::expect_equal( - ncol(terraclimate_covariate_geom), 3 - ) - testthat::expect_true( - "SpatVector" %in% class(terraclimate_covariate_geom) - ) -}) - -## 17. 
Lagged variables #### -testthat::test_that("calc_lagged returns as expected.", { - withr::local_package("terra") - withr::local_package("data.table") - lags <- c(0, 1, 2) - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - # expect function - testthat::expect_true( - is.function(calc_lagged) - ) - for (l in seq_along(lags)) { - narr <- - process_narr( - date = c("2018-01-01", "2018-01-10"), - variable = "weasd", - path = - testthat::test_path( - "..", - "testdata", - "narr", - "weasd" - ) - ) - narr_covariate <- - calc_narr( - from = narr, - locs = ncp, - locs_id = "site_id", - radius = 0, - fun = "mean" - ) - # set column names - narr_covariate <- calc_setcolumns( - from = narr_covariate, - lag = 0, - dataset = "narr", - locs_id = "site_id" - ) - # expect identical if lag = 0 - if (lags[l] == 0) { - narr_lagged <- calc_lagged( - from = narr_covariate, - date = c("2018-01-01", "2018-01-10"), - lag = lags[l], - locs_id = "site_id", - time_id = "time" - ) - testthat::expect_identical(narr_lagged, narr_covariate) - } else { - # expect error because 2018-01-01 will not have lag data from 2017-12-31 - testthat::expect_error( - calc_lagged( - from = narr_covariate, - date = c("2018-01-01", "2018-01-10"), - lag = lags[l], - locs_id = "site_id", - time_id = "time" - ) - ) - narr_lagged <- calc_lagged( - from = narr_covariate, - date = c("2018-01-05", "2018-01-10"), - lag = lags[l], - locs_id = "site_id", - time_id = "time" - ) - # expect output is data.frame - testthat::expect_true( - class(narr_lagged) == "data.frame" - ) - # expect lag day - testthat::expect_true(grepl("_[0-9]{1}$", colnames(narr_lagged)[3])) - # expect no NA - testthat::expect_true(all(!is.na(narr_lagged))) - } - } -}) - -## lagged variables with geometry -testthat::test_that("calc_lagged(geom = TRUE) works", { - withr::local_package("terra") - withr::local_package("data.table") - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" 
- # expect function - testthat::expect_true( - is.function(calc_lagged) - ) - narr <- process_narr( - date = c("2018-01-01", "2018-01-10"), - variable = "weasd", - path = - testthat::test_path( - "..", - "testdata", - "narr", - "weasd" - ) - ) - narr_covariate <- - calc_narr( - from = narr, - locs = ncp, - locs_id = "site_id", - radius = 0, - fun = "mean" - ) - # set column names - narr_covariate <- calc_setcolumns( - from = narr_covariate, - lag = 0, - dataset = "narr", - locs_id = "site_id" - ) - - # expect error with geom = TRUE and locs as data.frame - testthat::expect_error( - calc_lagged( - from = narr_covariate, - date = c("2018-01-02", "2018-01-04"), - lag = 1, - geom = TRUE - ) - ) -}) - -## 19. Wrapper #### -testthat::test_that("calc_covariates wrapper works", { - - withr::local_package("rlang") - withr::local_package("terra") - withr::local_package("sf") - withr::local_options(list(sf_use_s2 = FALSE)) - - candidates <- - c("modis", "koppen-geiger", - "koeppen-geiger", "koppen", "koeppen", - "geos", "dummies", "gmted", - "sedac_groads", "groads", "roads", - "ecoregions", "ecoregion", "hms", "smoke", - "gmted", "narr", "geos", - "sedac_population", "population", "nlcd", - "merra", "MERRA", "merra2", "MERRA2", - "tri", "nei", "prism", "huc", "cdl") - for (cand in candidates) { - testthat::expect_error( - calc_covariates(covariate = cand) - ) - } -}) - - -testthat::test_that("calc_covariates wrapper works", { - - withr::local_package("rlang") - withr::local_package("terra") - withr::local_package("sf") - withr::local_options(list(sf_use_s2 = FALSE)) - - ncp <- data.frame(lon = -78.8277, lat = 35.95013) - ncp$site_id <- "3799900018810101" - ncp$time <- 2018 - ncpt <- - terra::vect(ncp, geom = c("lon", "lat"), - keepgeom = TRUE, crs = "EPSG:4326") - ncpt$time <- c(2018) - path_tri <- testthat::test_path("..", "testdata", "tri") - - testthat::expect_no_error( - tri_r <- process_tri(path = path_tri, year = 2018) - ) - - testthat::expect_no_error( - tri_c <- 
calc_covariates( - covariate = "tri", - from = tri_r, - locs = ncpt, - radius = 50000L - ) - ) - testthat::expect_true(is.data.frame(tri_c)) - - candidates <- - c("modis", "koppen-geiger", - "koeppen-geiger", "koppen", "koeppen", - "geos", "dummies", "gmted", - "sedac_groads", "groads", "roads", - "ecoregions", "ecoregion", "hms", "smoke", - "gmted", "narr", "geos", - "sedac_population", "population", "nlcd", - "merra", "merra2", - "gridmet", "terraclimate", - "tri", "nei") - for (cand in candidates) { - testthat::expect_error( - calc_covariates(covariate = cand) - ) - } -}) - -# calc check time -testthat::test_that("calc_check_time identifies missing `time` column.", { - testthat::expect_error( - # provide integer instead of data.frame to provoke error - calc_check_time(12, TRUE) - ) - testthat::expect_message( - # provide data.frame without time to provoke message - calc_check_time( - data.frame(x = 10, y = 20), - true - ) - ) -}) - -# Calc message -testthat::test_that("calc_message exception", - { - testthat::expect_no_error( - calc_message("gmted", "mean", "2020", "year", NULL) - ) - testthat::expect_no_error( - calc_message("narr", "shum", 2000, "year", NULL) - ) - } -) - -# calc time -testthat::test_that("calc time remains", { - testthat::expect_no_error( - rr <- calc_time("eternal", "timeless") - ) - testthat::expect_true(rr == "eternal") -}) - -# calc worker -testthat::test_that("calc_worker remaining", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_package("exactextractr") - withr::local_options(sf_use_s2 = FALSE) - - ncp <- data.frame(lon = -78.8277, lat = 35.95013, time = "boundless") - ncp$site_id <- "3799900018810101" - ncpt <- - terra::vect(ncp, geom = c("lon", "lat"), - keepgeom = TRUE, crs = "EPSG:4326") - nc <- system.file("gpkg/nc.gpkg", package = "sf") - nc <- terra::vect(nc) - nc <- terra::project(nc, "EPSG:4326") - ncrast <- terra::rast(nc, resolution = 0.05) - terra::values(ncrast) <- rgamma(terra::ncell(ncrast), 
1, 1e-4) - - testthat::expect_no_error( - cwres <- - calc_worker( - from = ncrast, - dataset = "whatever", - locs_vector = ncpt, - locs_df = ncp, - time = ncpt$time, - time_type = "timeless", - radius = 1e5, - max_cells = 3e7 - ) - ) - testthat::expect_s3_class(cwres, "data.frame") -}) diff --git a/tests/testthat/test-cropscape.R b/tests/testthat/test-cropscape.R new file mode 100644 index 00000000..d03b5493 --- /dev/null +++ b/tests/testthat/test-cropscape.R @@ -0,0 +1,144 @@ +################################################################################ +##### unit and integration tests for CropScape functions + +################################################################################ +##### download_cropscape +testthat::test_that("download_cropscape (no errors - GMU)", { + withr::local_package("httr") + withr::local_package("stringr") + # Set up test data + year <- 2010 + directory_to_save <- paste0(tempdir(), "/cps/") + + # Call the function + testthat::expect_no_error( + download_cropscape( + year = year, + source = "GMU", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE + ) + ) + commands_path <- paste0( + directory_to_save, + "CropScape_CDL_", + "GMU", + "_", + year, + "_", + Sys.Date(), + "_wget_commands.txt" + ) + + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 5) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +test_that("download_cropscape (no errors - USDA)", { + withr::local_package("httr") + withr::local_package("stringr") + # Set up test data + year <- 2010 + 
directory_to_save <- paste0(tempdir(), "/cps/") + + # Call the function + testthat::expect_no_error( + download_cropscape( + year = year, + source = "USDA", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE + ) + ) + commands_path <- paste0( + directory_to_save, + "CropScape_CDL_", + "USDA", + "_", + year, + "_", + Sys.Date(), + "_wget_commands.txt" + ) + + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 5) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_cropscape (expected errors)", { + # expected errors due to invalid years + # Set up test data + invalid_year <- 1996 + testthat::expect_error(download_cropscape(year = 2020, source = "CMU")) + # Call the function and expect an error + testthat::expect_error( + download_cropscape(year = invalid_year, source = "GMU") + ) + testthat::expect_error( + download_cropscape(year = 2000, source = "USDA") + ) +}) + + +################################################################################ +##### process_cropscape +testthat::test_that("process_cropscape", { + # Set up test data + withr::local_package("terra") + filepath <- + testthat::test_path("..", "testdata/cropscape/cdl_30m_r_nc_2019_sub.tif") + dirpath <- testthat::test_path("..", "testdata/cropscape") + year <- 2019 + + # Call the function + testthat::expect_no_error(result <- process_cropscape(filepath, year)) + testthat::expect_no_error(process_cropscape(dirpath, year)) + + # test with cropping extent + testthat::expect_no_error( + result_ext <- 
process_cropscape( + filepath, year, extent = terra::ext(result) + ) + ) + + # Check the return type + testthat::expect_true(inherits(result, "SpatRaster")) + + # Check the metadata + testthat::expect_equal( + unname(terra::metags(result)["year"]), + as.character(year) + ) + + # error cases + testthat::expect_error(process_cropscape(path = 0, year = "MILLENNIUM")) + testthat::expect_error( + process_cropscape(path = "/home/some/path", year = "MILLENNIUM") + ) +}) diff --git a/tests/testthat/test-download.R b/tests/testthat/test-download.R new file mode 100644 index 00000000..62923e75 --- /dev/null +++ b/tests/testthat/test-download.R @@ -0,0 +1,196 @@ +################################################################################ +##### unit and integration tests for download_data and auxiliary functions +# nolint start + +################################################################################ +##### download_data +testthat::test_that("download_data (expected errors - acknowledgement)", { + download_datasets <- c("aqs", "ecoregion", "geos", "gmted", "koppen", + "koppengeiger", "merra2", "merra", "narr", + "nlcd", "noaa", "sedac_groads", + "sedac_population", "groads", "population", + "hms", "smoke", "gridmet", + "terraclimate", "huc", "cropscape", "cdl", "prism") + for (d in seq_along(download_datasets)) { + testthat::expect_error( + download_data(dataset_name = download_datasets[d], + acknowledgement = FALSE) + ) + } +}) + +testthat::test_that("download_data (expected errors - directory)", { + download_datasets <- c("aqs", "ecoregion", "geos", "gmted", "koppen", + "koppengeiger", "merra2", "merra", "narr", + "nlcd", "noaa", "sedac_groads", + "sedac_population", "groads", "population", + "hms", "smoke", "gridmet", + "terraclimate", "huc", "cropscape", "cdl", "prism") + for (d in seq_along(download_datasets)) { + testthat::expect_error( + download_data(dataset_name = download_datasets[d], + acknowledgement = TRUE, + directory_to_save = NULL) + ) + } +}) + 
+testthat::test_that("download_data (expected errors - temporal range)", { + withr::with_tempdir({ + testthat::expect_error( + download_geos( + date = c("1900-01-01", "2018-01-01"), + collection = "aqc_tavg_1hr_g1440x721_v1", + acknowledgement = TRUE, + directory_to_save = "." + ) + ) + testthat::expect_error( + download_aqs( + year = c(1900, 2022), + acknowledgement = TRUE, + directory_to_save = "." + ) + ) + testthat::expect_error( + download_narr( + year = c(1900, 2022), + variables = "air.sfc", + acknowledgement = TRUE, + directory_to_save = "." + ) + ) + testthat::expect_error( + download_merra2( + date = c("1900-01-01", "2023-09-01"), + collection = "inst1_2d_asm_Nx", + directory_to_save = ".", + acknowledgement = TRUE, + remove_command = TRUE + ) + ) + sink() + testthat::expect_error( + download_hms( + date = c("1900-01-01", "2018-01-01"), + directory_to_save = ".", + acknowledgement = TRUE + ) + ) + testthat::expect_error( + download_gridmet( + year = c(1900, 2022), + variables = "Precipitation", + acknowledgement = TRUE, + directory_to_save = "." 
+      )
+    )
+    testthat::expect_error(
+      download_terraclimate(
+        year = c(1900, 2022),
+        variables = "Wind Speed",
+        acknowledgement = TRUE,
+        directory_to_save = "."
+      )
+    )
+  })
+})
+
+################################################################################
+##### download_epa_certificate
+testthat::test_that("download_epa_certificate", {
+  testthat::expect_error(
+    download_epa_certificate("file.txt")
+  )
+  testthat::expect_no_error(
+    download_epa_certificate(file.path(tempdir(), "file.pem"))
+  )
+  testthat::expect_no_error(
+    download_epa_certificate(
+      system.file("extdata/cacert_gaftp_epa.pem", package = "amadeus")
+    )
+  )
+})
+
+################################################################################
+##### extract_urls
+testthat::test_that("extract_urls", {
+  commands <- paste0(
+    "curl -s -o ",
+    "/PATH/hms_smoke_Shapefile_20230901.zip --url ",
+    "https://satepsanone.nesdis.noaa.gov/pub/FIRE/web/HMS/Smoke_Polygons/",
+    "Shapefile/2023/09/hms_smoke20230901.zip"
+  )
+  urls <- extract_urls(commands = commands)
+  testthat::expect_true(
+    is.null(urls)
+  )
+})
+
+################################################################################
+##### check_urls
+testthat::test_that("check_urls returns NULL undefined size.", {
+  urls <- paste0(
+    "https://satepsanone.nesdis.noaa.gov/pub/FIRE/web/HMS/Smoke_Polygons/",
+    "Shapefile/2023/09/hms_smoke20230901.zip"
+  )
+  url_status <- check_urls(urls = urls, method = "HEAD")
+  testthat::expect_true(
+    is.null(url_status)
+  )
+})
+
+testthat::test_that("check_urls handles size > length(urls)", {
+  urls <- paste0(
+    "https://satepsanone.nesdis.noaa.gov/pub/FIRE/web/HMS/Smoke_Polygons/",
+    "Shapefile/2023/09/hms_smoke20230901.zip"
+  )
+  testthat::expect_no_error(
+    url_status <- check_urls(urls = urls, size = 10, method = "HEAD")
+  )
+  testthat::expect_length(url_status, 1)
+})
+
+################################################################################
+##### download_sink
+testthat::test_that("download_sink", {
+  dir <- paste0(tempdir(), "/sink/")
+  dir.create(dir, recursive = TRUE)
+  testfile <- paste0(dir, "sink_test.txt")
+  file.create(testfile)
+  testthat::expect_no_error(
+    download_sink(testfile)
+  )
+  sink()
+  Sys.sleep(1.5)
+  file.remove(testfile)
+  unlink(dir, recursive = TRUE)
+})
+
+################################################################################
+##### download_remove_zips
+testthat::test_that("download_remove_zips", {
+  dir <- paste0(tempdir(), "/yellowstone/")
+  testfile1 <- paste0(dir, "barren/coyote.zip")
+  dir.create(dirname(testfile1), recursive = TRUE)
+  file.create(testfile1, recursive = TRUE)
+  testfile2 <- paste0(dir, "retain/retain.txt")
+  dir.create(dirname(testfile2), recursive = TRUE)
+  file.create(testfile2, recursive = TRUE)
+  testthat::expect_no_error(
+    download_remove_zips(remove = TRUE, testfile1)
+  )
+  # expect only the testfile1 directory to be removed
+  testthat::expect_equal(
+    length(
+      list.files(
+        dir,
+        recursive = TRUE,
+        include.dirs = TRUE
+      )
+    ),
+    2
+  )
+  # NOTE(review): was `unlink(paste0(dir, "/yellowstone"))`, which pointed at
+  # the nonexistent ".../yellowstone//yellowstone" and left the temp tree
+  # behind; remove the actual test directory recursively instead.
+  unlink(dir, recursive = TRUE)
+})
+# nolint end
diff --git a/tests/testthat/test-download_functions.R b/tests/testthat/test-download_functions.R
deleted file mode 100644
index 1f3b55c4..00000000
--- a/tests/testthat/test-download_functions.R
+++ /dev/null
@@ -1,1779 +0,0 @@
-## test for download functions
-
-testthat::test_that("Error when acknowledgement = FALSE", {
-  download_datasets <- c("aqs", "ecoregion", "geos", "gmted", "koppen",
-                         "koppengeiger", "merra2", "merra", "narr",
-                         "nlcd", "noaa", "sedac_groads",
-                         "sedac_population", "groads", "population",
-                         "hms", "smoke", "gridmet",
-                         "terraclimate", "huc", "cropscape", "cdl", "prism")
-  for (d in seq_along(download_datasets)) {
-    expect_error(
-      download_data(dataset_name = download_datasets[d],
-                    acknowledgement = FALSE),
-      paste0("Please refer to the argument list and ",
-             "the error message above to rectify the error.\n")
-    )
-  }
-})
-
-testthat::test_that("Error when one parameter is NULL.", { - download_datasets <- c("aqs", "ecoregion", "geos", "gmted", "koppen", - "koppengeiger", "merra2", "merra", "narr", - "nlcd", "noaa", "sedac_groads", - "sedac_population", "groads", "population", - "hms", "smoke", "gridmet", - "terraclimate", "huc", "cropscape", "cdl", "prism") - for (d in seq_along(download_datasets)) { - expect_error( - download_data(dataset_name = download_datasets[d], - acknowledgement = TRUE, - directory_to_save = NULL), - paste0("Please refer to the argument list and ", - "the error message above to rectify the error.\n") - ) - } -}) - - -testthat::test_that("Errors when temporal ranges invalid.", { - withr::with_tempdir({ - expect_error( - download_geos( - date = c("1900-01-01", "2018-01-01"), - collection = "aqc_tavg_1hr_g1440x721_v1", - acknowledgement = TRUE, - directory_to_save = "." - ) - ) - expect_error( - download_aqs( - year = c(1900, 2022), - acknowledgement = TRUE, - directory_to_save = "." - ) - ) - expect_error( - download_narr( - year = c(1900, 2022), - variables = "air.sfc", - acknowledgement = TRUE, - directory_to_save = "." - ) - ) - expect_error( - download_merra2( - date = c("1900-01-01", "2023-09-01"), - collection = "inst1_2d_asm_Nx", - directory_to_save = ".", - acknowledgement = TRUE, - remove_command = TRUE - ) - ) - sink() - expect_error( - download_hms( - date = c("1900-01-01", "2018-01-01"), - directory_to_save = ".", - acknowledgement = TRUE - ) - ) - expect_error( - download_gridmet( - year = c(1900, 2022), - variables = "Precipitation", - acknowledgement = TRUE, - directory_to_save = "." 
- ) - ) - expect_error( - download_terraclimate( - year = c(1900, 2022), - variables = "Wind Speed", - acknowledgement = TRUE, - directory_to_save = ".", - ) - ) - }) -}) - -testthat::test_that("EPA AQS download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - year_start <- 2018 - year_end <- 2022 - resolution_temporal <- "daily" - parameter_code <- 88101 - directory_to_save <- paste0(tempdir(), "/epa/") - # run download function - download_data(dataset_name = "aqs", - year = c(year_start, year_end), - directory_to_save = directory_to_save, - acknowledgement = TRUE, - unzip = FALSE, - remove_zip = FALSE, - download = FALSE, - remove_command = FALSE) - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # define file path with commands - commands_path <- - paste0( - download_sanitize_path(directory_to_save), - "aqs_", - parameter_code, - "_", - year_start, "_", year_end, - "_", - resolution_temporal, - "_curl_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 4) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tets - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - unlink(directory_to_save, recursive = TRUE) -}) - -# Ecoregion tests #### -testthat::test_that("Ecoregion download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - directory_to_save <- paste0(tempdir(), "/eco/") - certificate <- system.file("extdata/cacert_gaftp_epa.pem", - package = "amadeus") - # run download function - download_data(dataset_name = "ecoregion", - 
directory_to_save = directory_to_save, - acknowledgement = TRUE, - unzip = FALSE, - remove_zip = FALSE, - download = FALSE, - remove_command = FALSE, - epa_certificate_path = certificate) - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # define file path with commands - commands_path <- paste0( - download_sanitize_path(directory_to_save), - "us_eco_l3_state_boundaries_", - Sys.Date(), - "_wget_command.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 3) - # check HTTP URL status - url_status <- - httr::HEAD(urls, config = httr::config(cainfo = certificate)) - url_status <- url_status$status_code - # implement unit tets - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - - file.create( - file.path(directory_to_save, "zip_files", - "us_eco_l3_state_boundaries.zip"), - recursive = TRUE - ) - testthat::expect_no_error( - download_data( - dataset_name = "ecoregion", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - unzip = FALSE, - remove_zip = TRUE, - download = FALSE, - remove_command = TRUE, - epa_certificate_path = certificate - ) - ) - testthat::expect_true( - dir.exists(paste0(directory_to_save, "/data_files")) - ) - testthat::expect_equal( - length( - list.files( - directory_to_save, recursive = TRUE, include.dirs = TRUE - ) - ), - 1 - ) - unlink(directory_to_save, recursive = TRUE) -}) - -# GEOS-CF tests #### -testthat::test_that("GEOS-CF download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - date_start <- "2019-09-09" - date_end <- "2019-09-09" - collections <- c("aqc_tavg_1hr_g1440x721_v1", - "chm_inst_1hr_g1440x721_p23") - directory_to_save <- paste0(tempdir(), "/geos/") - # run 
download function - testthat::expect_no_error( - download_data(dataset_name = "geos", - date = c(date_start, date_end), - collection = collections, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE) - ) - # define file path with commands - commands_path <- paste0(directory_to_save, - "geos_", - date_start, - "_", - date_end, - "_wget_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 2) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 2L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - -# GMTED tests #### -testthat::test_that("GMTED download URLs have HTTP status 200.", { - withr::local_package("httr") - # function parameters - statistics <- c("Breakline Emphasis", - "Standard Deviation Statistic") - resolution <- "7.5 arc-seconds" - directory_to_save <- paste0(tempdir(), "/gmted/") - for (s in seq_along(statistics)) { - # run download function - download_data(dataset_name = "gmted", - statistic = statistics[s], - resolution = resolution, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - unzip = FALSE, - remove_zip = FALSE, - download = FALSE) - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # define file path with commands - commands_path <- paste0(download_sanitize_path(directory_to_save), - "gmted_", - gsub(" ", "", statistics[s]), - "_", - gsub(" ", "", resolution), - "_", - Sys.Date(), - "_curl_command.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- 
extract_urls(commands = commands, position = 6) - filename <- extract_urls(commands = commands, position = 4) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - - file.create( - file.path(filename), - recursive = TRUE - ) - file.create( - file.path( - paste0(directory_to_save, "/data_files/test.txt") - ) - ) - # remove file with commands after test - # remove temporary gmted - testthat::expect_no_error( - download_data(dataset_name = "gmted", - statistic = statistics[s], - resolution = resolution, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - unzip = FALSE, - remove_zip = TRUE, - remove_command = TRUE, - download = FALSE) - ) - testthat::expect_true( - dir.exists(paste0(directory_to_save, "/data_files")) - ) - testthat::expect_equal( - length( - list.files( - directory_to_save, recursive = TRUE, include.dirs = TRUE - ) - ), - 2 - ) - unlink(directory_to_save, recursive = TRUE) - } -}) - -# MERRA2 #### -testthat::test_that("MERRA2 download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - date_start <- "2022-02-14" - date_end <- "2022-03-08" - collections <- c("inst1_2d_asm_Nx", "inst3_3d_asm_Np") - directory_to_save <- paste0(tempdir(), "/merra2/") - # run download function - testthat::expect_no_error( - download_data(dataset_name = "merra2", - date = c(date_start, date_end), - collection = collections, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE) - ) - # define path with commands - commands_path <- paste0(directory_to_save, - "merra2_", - date_start, - "_", - date_end, - "_wget_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position 
= 2) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 3L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - -# MERRA2 Collection error test #### -testthat::test_that("MERRA2 returns message with unrecognized collection.", { - # function parameters - collections <- "uNrEcOgNiZeD" - directory_to_save <- testthat::test_path("..", "testdata/", "") - testthat::expect_error( - download_data( - dataset_name = "merra", - collection = collections, - directory_to_save = directory_to_save, - acknowledgement = TRUE - ) - ) -}) - -## NARR (monolevel and pressure level) -testthat::test_that("NARR download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - year_start <- 2018 - year_end <- 2018 - variables <- c("weasd", "omega", "soill") # includes monolevel, pressure level, subsurface - directory_to_save <- paste0(tempdir(), "/narr/") - # run download function - download_data(dataset_name = "narr", - year = c(year_start, year_end), - variables = variables, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE) - # define path with commands - commands_path <- paste0(directory_to_save, - "narr_", - year_start, "_", year_end, - "_curl_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 6) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 5L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - 
unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("NARR error with invalid years.", { - testthat::expect_error( - download_data( - dataset_name = "narr", - variables = "weasd", - year = c(10, 11), - acknowledgement = TRUE, - directory_to_save = testthat::test_path("..", "testdata/", "") - ) - ) -}) - -#### NARR variable sorting function -testthat::test_that("narr_variable with unrecognized variable.", { - testthat::expect_error( - narr_variable("uNrEcOgNiZed") - ) -}) - -testthat::test_that("NOAA HMS Smoke download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - date_start <- "2022-08-12" - date_end <- "2022-09-21" - directory_to_save <- paste0(tempdir(), "/hms/") - data_formats <- c("Shapefile", "KML") - for (d in seq_along(data_formats)) { - # run download function - download_data(dataset_name = "smoke", - date = c(date_start, date_end), - data_format = data_formats[d], - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE, - unzip = FALSE, - remove_zip = FALSE) - # define file path with commands - commands_path <- paste0(download_sanitize_path(directory_to_save), - "hms_smoke_", - gsub("-", "", date_start), - "_", - gsub("-", "", date_end), - "_curl_commands.txt") - # expect sub-directories to be created - if (data_formats[d] == "Shapefile") { - expected_folders <- 3 - } else { - expected_folders <- 2 - } - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == expected_folders - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 6) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 10L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = 
url_status) - # remove file with commands after test - file.remove(commands_path) - # remove temporary hms - unlink(directory_to_save, recursive = TRUE) - } -}) - -testthat::test_that("download_hms error for unzip and directory.", { - error_directory <- paste0(tempdir(), "/error/") - testthat::expect_error( - download_data( - dataset_name = "hms", - acknowledgement = TRUE, - directory_to_save = error_directory, - unzip = FALSE, - remove_zip = TRUE - ) - ) - unlink(error_directory, recursive = TRUE) -}) - -testthat::test_that("NLCD download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - years <- c(2021, 2019, 2016) - collections <- c(rep("Coterminous United States", 2), "Alaska") - collection_codes <- c(rep("l48", 2), "ak") - directory_to_save <- paste0(tempdir(), "/nlcd/") - # run download function - for (y in seq_along(years)) { - download_data(dataset_name = "nlcd", - year = years[y], - collection = collections[y], - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE, - unzip = FALSE, - remove_zip = FALSE) - # define file path with commands - commands_path <- paste0(download_sanitize_path(directory_to_save), - "nlcd_", - years[y], - "_land_cover_", - collection_codes[y], - "_", - Sys.Date(), - "_curl_command.txt") - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 5) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - } - 
testthat::expect_error( - download_data(dataset_name = "nlcd", - year = 2000, - collection = "Coterminous United States", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = TRUE, - unzip = FALSE, - remove_zip = FALSE) - ) - # remove temporary nlcd - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("SEDAC groads download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - data_regions <- c("Americas", "Global") - data_formats <- c("Geodatabase", "Shapefile") - directory_to_save <- paste0(tempdir(), "/groads/") - # run download function - for (r in seq_along(data_regions)) { - data_region <- data_regions[r] - for (f in seq_along(data_formats)) { - download_data(dataset_name = "sedac_groads", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - data_format = data_formats[f], - data_region = data_region, - download = FALSE, - unzip = FALSE, - remove_zip = FALSE, - remove_command = FALSE) - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # define file path with commands - commands_path <- paste0(download_sanitize_path(directory_to_save), - "sedac_groads_", - gsub(" ", "_", tolower(data_region)), - "_", - Sys.Date(), - "_curl_command.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 11) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "GET") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - } - } - - testthat::expect_message( - download_data(dataset_name = "sedac_groads", - data_format = "Shapefile", - 
data_region = "Global", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - unzip = FALSE, - remove_zip = FALSE, - remove_command = TRUE) - ) - # remove temporary groads - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("SEDAC population download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - years <- c("2020", "all") - data_formats <- c("GeoTIFF", "ASCII") - data_resolutions <- cbind(c("30 second"), - c("30_sec")) - directory_to_save <- paste0(tempdir(), "/pop/") - for (f in seq_along(data_formats)) { - data_format <- data_formats[f] - for (y in seq_along(years)) { - year <- years[y] - for (r in seq_len(nrow(data_resolutions))) { - # run download function - download_data(dataset_name = "sedac_population", - year = year, - data_format = data_format, - data_resolution = data_resolutions[r, 1], - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - unzip = FALSE, - remove_zip = FALSE, - remove_command = FALSE) - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # define file path with commands - if (year == "all") { - year <- "totpop" - } else { - year <- year - } - if (year == "totpop" && data_resolutions[r, 2] == "30_sec") { - resolution <- "2pt5_min" - } else { - resolution <- data_resolutions[r, 2] - } - commands_path <- paste0(download_sanitize_path(directory_to_save), - "sedac_population_", - year, - "_", - resolution, - "_", - Sys.Date(), - "_curl_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 11) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "GET") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - 
commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - # remove temporary population - unlink(directory_to_save, recursive = TRUE) - } - } - } -}) - -testthat::test_that("SEDAC population data types are coerced.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - year <- c("all") - data_formats <- c("GeoTIFF", "ASCII", "netCDF") - data_resolutions <- c("30 second", "2pt5_min") - directory_to_save <- paste0(tempdir(), "/pop/") - for (f in seq_along(data_formats)) { - download_data(dataset_name = "sedac_population", - year = year, - data_format = data_formats[f], - data_resolution = data_resolutions[1], - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - unzip = FALSE, - remove_zip = FALSE, - remove_command = FALSE) - commands_path <- paste0(directory_to_save, - "sedac_population_", - "totpop", - "_", - data_resolutions[2], - "_", - Sys.Date(), - "_curl_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 11) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "GET") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) - } -}) - -testthat::test_that("Koppen Geiger download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - time_periods <- c("Present", "Future") - data_resolutions <- c("0.0083") - directory_to_save <- paste0(tempdir(), "/kop/") - # run download function - for (p in seq_along(time_periods)) { - time_period <- time_periods[p] - for (d in seq_along(data_resolutions)) { - 
download_data(dataset_name = "koppen", - time_period = time_period, - data_resolution = data_resolutions[d], - directory_to_save = directory_to_save, - acknowledgement = TRUE, - unzip = FALSE, - remove_zip = FALSE, - download = FALSE, - remove_command = FALSE) - # define file path with commands - commands_path <- paste0(download_sanitize_path(directory_to_save), - "koppen_geiger_", - time_period, - "_", - gsub("\\.", - "p", - data_resolutions[d]), - "_", - Sys.Date(), - "_wget_command.txt") - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 2) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - } - } - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("MODIS-MOD09GA download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - years <- 2020 - product <- "MOD09GA" - version <- "61" - horizontal_tiles <- c(12, 13) - vertical_tiles <- c(5, 6) - nasa_earth_data_token <- "tOkEnPlAcEhOlDeR" - directory_to_save <- paste0(tempdir(), "/mod/") - for (y in seq_along(years)) { - date_start <- paste0(years[y], "-06-20") - date_end <- paste0(years[y], "-06-24") - # run download function - download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - version = version, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - 
download = FALSE, - remove_command = FALSE) - # define file path with commands - commands_path <- paste0( - directory_to_save, - product, - "_", - date_start, - "_", - date_end, - "_wget_commands.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path)[, 2] - # extract urls - urls <- extract_urls(commands = commands, position = 4) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 3L, method = "SKIP") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - } - unlink(directory_to_save, recursive = TRUE) -}) - - -testthat::test_that("MODIS-MOD06L2 download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - product <- "MOD06_L2" - version <- "61" - date_start <- "2019-02-18" - date_end <- "2019-02-18" - nasa_earth_data_token <- "tOkEnPlAcEhOlDeR" - horizontal_tiles <- c(8, 10) - vertical_tiles <- c(4, 5) - directory_to_save <- paste0(tempdir(), "/mod/") - - testthat::expect_error( - kax <- download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - version = version, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - mod06_links = NULL, - remove_command = FALSE) - ) - # link check - tdir <- tempdir() - faux_urls <- - rbind( - c(4387858920, - "/archive/allData/61/MOD06_L2/2019/049/MOD06_L2.A2019049.0720.061.2019049194350.hdf", - 28267915) - ) - - faux_urls <- data.frame(faux_urls) - mod06_scenes <- paste0(tdir, "/mod06_example.csv") - write.csv(faux_urls, mod06_scenes, row.names = FALSE) - - download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - 
version = version, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - mod06_links = mod06_scenes, - remove_command = FALSE) - - # define file path with commands - commands_path <- paste0( - directory_to_save, - product, - "_", - date_start, - "_", - date_end, - "_wget_commands.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path)[, 2] - # extract urls - urls <- extract_urls(commands = commands, position = 4) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "SKIP") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - - -testthat::test_that("MODIS download error cases.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - years <- 2020 - product <- c("MOD09GA", "MOD11A1", "MOD13A2", "MCD19A2") - product <- sample(product, 1L) - version <- "61" - horizontal_tiles <- c(12, 13) - vertical_tiles <- c(5, 6) - nasa_earth_data_token <- "tOkEnPlAcEhOlDeR" - directory_to_save <- paste0(tempdir(), "/mod/") - date_start <- paste0(years, "-06-25") - date_end <- paste0(years, "-06-28") - - # no token - testthat::expect_no_error( - download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - version = version, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - ) - - # no token - testthat::expect_error( - download_data(dataset_name = "modis", - date = c(date_start, date_end), - 
product = product, - version = version, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = NULL, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - ) - - # year difference between date_start and date_end - testthat::expect_error( - download_data(dataset_name = "modis", - date = c(date_start, "2024-03-28"), - product = "MOD11A1", - version = version, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - ) - - # null version - testthat::expect_error( - download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - version = NULL, - horizontal_tiles = horizontal_tiles, - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - ) - - # invalid tile range (horizontal) - testthat::expect_error( - download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - version = "61", - horizontal_tiles = c(-13, -3), - vertical_tiles = vertical_tiles, - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - ) - - # invalid tile range (horizontal) - testthat::expect_error( - download_data(dataset_name = "modis", - date = c(date_start, date_end), - product = product, - version = "61", - horizontal_tiles = horizontal_tiles, - vertical_tiles = c(100, 102), - nasa_earth_data_token = nasa_earth_data_token, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - ) - - # define file path with commands - commands_path <- paste0( - 
directory_to_save, - product, - "_", - date_start, - "_", - date_end, - "_wget_commands.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path)[, 2] - # extract urls - urls <- extract_urls(commands = commands, position = 4) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 2L, method = "SKIP") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - - -testthat::test_that("EPA TRI download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - directory_to_save <- paste0(tempdir(), "/tri/") - # run download function - download_data(dataset_name = "tri", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE) - year_start <- 2018L - year_end <- 2022L - - # define file path with commands - commands_path <- paste0( - directory_to_save, - "TRI_", - year_start, "_", year_end, - "_", - Sys.Date(), - "_curl_commands.txt" - ) - - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 3) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "SKIP") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - - -testthat::test_that("EPA NEI (AADT) download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - directory_to_save <- paste0(tempdir(), "/nei/") - certificate <- 
system.file("extdata/cacert_gaftp_epa.pem", - package = "amadeus") - # run download function - year <- c(2017L, 2020L) - download_data(dataset_name = "nei", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - year = year, - remove_command = FALSE, - epa_certificate_path = certificate - ) - # expect sub-directories to be created - testthat::expect_true( - length( - list.files( - directory_to_save, include.dirs = TRUE - ) - ) == 3 - ) - # define file path with commands - commands_path <- paste0( - download_sanitize_path(directory_to_save), - "NEI_AADT_", - paste(year, collapse = "-"), - "_", - Sys.Date(), - "_wget_commands.txt" - ) - - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 3) - # check HTTP URL status - url_status <- - httr::HEAD(urls[1], config = httr::config(cainfo = certificate)) - url_status <- url_status$status_code - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - # remove temporary nei - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("EPA NEI (AADT) download LIVE.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - directory_to_save <- paste0(tempdir(), "/nei/") - certificate <- system.file("extdata/cacert_gaftp_epa.pem", - package = "amadeus") - # run download function - year <- c(2017L, 2020L) - testthat::expect_no_error( - download_data(dataset_name = "nei", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = TRUE, - year = year, - remove_command = FALSE, - epa_certificate_path = certificate, - unzip = TRUE - ) - ) - testthat::expect_equal( - length(list.files(paste0(directory_to_save, "/zip_files"))), 2 - ) - testthat::expect_equal( - 
length(list.files( - paste0(directory_to_save, "/data_files"), - recursive = TRUE) - ), 12 - ) - # remove temporary nei - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("Test error cases in EPA gaftp sources 1", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - tdir <- tempdir() - directory_to_save <- paste0(tempdir(), "/epa/") - certificate <- file.path(tdir, "cacert_gaftp_epa.pem") - # remove if there is a preexisting file - if (file.exists(certificate)) { - file.remove(certificate) - file.remove(gsub("pem", "crt", certificate)) - } - - # run download function - year <- c(2017L) - testthat::expect_message( - download_data(dataset_name = "nei", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - year = year, - remove_command = FALSE, - epa_certificate_path = certificate - ) - ) - # define file path with commands - commands_path <- paste0( - directory_to_save, - "NEI_AADT_", - paste(year, collapse = "-"), - "_", - Sys.Date(), - "_wget_commands.txt" - ) - # remove file with commands after test - testthat::expect_true(file.exists(commands_path)) - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("Test error cases in EPA gaftp sources 2", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - tdir <- tempdir(check = TRUE) - directory_to_save <- paste0(tempdir(), "/epa/") - certificate <- file.path(tdir, "cacert_gaftp_epa.pem") - # remove if there is a preexisting file - if (file.exists(certificate)) { - file.remove(certificate) - file.remove(gsub("pem", "crt", certificate)) - } - - # run download function - - testthat::expect_message( - download_data(dataset_name = "ecoregion", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE, - unzip = FALSE, - remove_zip = FALSE, - epa_certificate_path = certificate - ) - ) - # 
unlink dir - unlink(tdir) - - # define file path with commands - commands_path <- paste0( - directory_to_save, - "us_eco_l3_state_boundaries_", - Sys.Date(), - "_wget_command.txt" - ) - - # remove file with commands after test - testthat::expect_true(file.exists(commands_path)) - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - - -testthat::test_that("epa certificate", { - testthat::expect_error( - download_epa_certificate("file.txt") - ) - testthat::expect_no_error( - download_epa_certificate(file.path(tempdir(), "file.pem")) - ) - testthat::expect_no_error( - download_epa_certificate( - system.file("extdata/cacert_gaftp_epa.pem", package = "amadeus") - ) - ) -}) - - -testthat::test_that("extract_urls returns NULL undefined position.", { - commands <- paste0( - "curl -s -o ", - "/PATH/hms_smoke_Shapefile_20230901.zip --url ", - "https://satepsanone.nesdis.noaa.gov/pub/FIRE/web/HMS/Smoke_Polygons/", - "Shapefile/2023/09/hms_smoke20230901.zip" - ) - urls <- extract_urls(commands = commands) - testthat::expect_true( - is.null(urls) - ) -}) - -testthat::test_that("check_urls returns NULL undefined size.", { - urls <- paste0( - "https://satepsanone.nesdis.noaa.gov/pub/FIRE/web/HMS/Smoke_Polygons/", - "Shapefile/2023/09/hms_smoke20230901.zip" - ) - url_status <- check_urls(urls = urls, method = "HEAD") - testthat::expect_true( - is.null(url_status) - ) -}) - -testthat::test_that("download_hms LIVE run.", { - # function parameters - date <- "2018-01-01" - directory <- paste0(tempdir(), "/hms/") - # run download function - download_data( - dataset_name = "hms", - date = c(date, date), - directory_to_save = directory, - acknowledgement = TRUE, - download = TRUE, - unzip = TRUE, - remove_zip = FALSE, - remove_command = FALSE - ) - Sys.sleep(1.5) - testthat::expect_true( - length(list.files(directory, recursive = TRUE, include.dirs = TRUE)) == 8 - ) - commands <- list.files(directory, pattern = ".txt", full.names = TRUE) - testthat::expect_true( 
- file.exists(commands) - ) - # remove directory - unlink(directory, recursive = TRUE) -}) - -testthat::test_that("gridmet download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - year_start <- 2018 - year_end <- 2023 - variables <- "Precipitation" - directory_to_save <- paste0(tempdir(), "/gridmet/") - # run download function - download_data(dataset_name = "gridmet", - year = c(year_start, year_end), - variables = variables, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE) - # define path with commands - commands_path <- paste0(directory_to_save, - "/gridmet_", - year_start, "_", year_end, - "_curl_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 6) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 5L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("gridmet error with invalid years.", { - testthat::expect_error( - download_data( - dataset_name = "gridmet", - variables = "Precipitation", - year = c(10, 11), - acknowledgement = TRUE, - directory_to_save = paste0(tempdir(), "/g/") - ) - ) -}) - -testthat::test_that("gridmet error with invalid variables", { - testthat::expect_error( - download_data( - dataset_name = "gridmet", - variables = "temp", - year = c(2018, 2018), - acknowledgement = TRUE, - directory_to_save = paste0(tempdir(), "/g/") - ) - ) -}) - -testthat::test_that("terraclimate download URLs have HTTP status 200.", { - withr::local_package("httr") - withr::local_package("stringr") - # function parameters - year_start <- 2018 - year_end <- 2023 - 
variables <- "Precipitation" - directory_to_save <- paste0(tempdir(), "/terracclimate/") - # run download function - download_data(dataset_name = "terraclimate", - year = c(year_start, year_end), - variables = variables, - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE) - # define path with commands - commands_path <- paste0(directory_to_save, - "/terraclimate_", - year_start, "_", year_end, - "_curl_commands.txt") - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 6) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 5L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - -testthat::test_that("terraclimate error with invalid years.", { - testthat::expect_error( - download_data( - dataset_name = "terraclimate", - variables = "Precipitation", - year = c(10, 11), - acknowledgement = TRUE, - directory_to_save = paste0(tempdir(), "/epa/") - ) - ) -}) - -testthat::test_that("terraclimate error with invalid variables", { - testthat::expect_error( - download_data( - dataset_name = "gridmet", - variables = "temp", - year = c(2018, 2018), - acknowledgement = TRUE, - directory_to_save = paste0(tempdir(), "/epa/") - ) - ) -}) - - -testthat::test_that("download_cropscape throws an error for invalid year", { - # Set up test data - invalid_year <- 1996 - testthat::expect_error(download_cropscape(year = 2020, source = "CMU")) - # Call the function and expect an error - testthat::expect_error(download_cropscape(year = invalid_year, source = "GMU")) - testthat::expect_error(download_cropscape(year = 2000, source = "USDA")) -}) - -testthat::test_that("download_cropscape generates correct 
download commands (GMU)", { - withr::local_package("httr") - withr::local_package("stringr") - # Set up test data - year <- 2010 - directory_to_save <- paste0(tempdir(), "/cps/") - - # Call the function - testthat::expect_no_error( - download_cropscape( - year = year, - source = "GMU", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE - ) - ) - commands_path <- paste0( - directory_to_save, - "CropScape_CDL_", - "GMU", - "_", - year, - "_", - Sys.Date(), - "_wget_commands.txt" - ) - - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 5) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - - -test_that("download_cropscape generates correct download commands (USDA)", { - withr::local_package("httr") - withr::local_package("stringr") - # Set up test data - year <- 2010 - directory_to_save <- paste0(tempdir(), "/cps/") - - # Call the function - testthat::expect_no_error( - download_cropscape( - year = year, - source = "USDA", - directory_to_save = directory_to_save, - acknowledgement = TRUE, - download = FALSE, - remove_command = FALSE - ) - ) - commands_path <- paste0( - directory_to_save, - "CropScape_CDL_", - "USDA", - "_", - year, - "_", - Sys.Date(), - "_wget_commands.txt" - ) - - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 5) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = 
directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) -}) - - -testthat::test_that("download_prism downloads the correct data files", { - # Set up test data - time <- seq(201005, 201012, by = 1) - element <- c("ppt", "tmin", "tmax", "tmean", "tdmean", - "vpdmin", "vpdmax") - # in case of multiple test runs - # note that PRISM download for the same data element - # is allowed up to twice a day. IP address could be blocked - # if the limit is exceeded - time <- sample(time, 1) - element <- sample(element, 1) - data_type <- "ts" - format <- "nc" - directory_to_save <- paste0(tempdir(), "/prism/") - acknowledgement <- TRUE - download <- FALSE - remove_command <- FALSE - - # Call the function - download_prism( - time = time, - element = element, - data_type = data_type, - format = format, - directory_to_save = directory_to_save, - acknowledgement = acknowledgement, - download = download, - remove_command = remove_command - ) - - testthat::expect_message( - download_prism( - time = time, - element = "ppt", - data_type = "normals", - format = "asc", - directory_to_save = directory_to_save, - acknowledgement = acknowledgement, - download = download, - remove_command = TRUE - ) - ) - - commands_path <- paste0( - directory_to_save, - "PRISM_", - element, - "_", - data_type, - "_", - time, - "_", - Sys.Date(), - "_wget_commands.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 6) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - - # Set up test data - time <- "202105" 
- element <- "soltotal" - data_type <- "ts" - format <- "nc" - directory_to_save <- paste0(tempdir(), "/prism/") - acknowledgement <- TRUE - download <- FALSE - remove_command <- FALSE - - # Call the function and expect an error - testthat::expect_error(download_prism( - time = time, - element = element, - data_type = data_type, - format = format, - directory_to_save = directory_to_save, - acknowledgement = acknowledgement, - download = download, - remove_command = remove_command - )) - unlink(directory_to_save, recursive = TRUE) -}) - - - - -testthat::test_that("download_huc works", - { - withr::local_package("httr") - - directory_to_save <- paste0(tempdir(), "/huc/") - allregions <- c("Lower48", "Islands") - alltypes <- c("Seamless", "OceanCatchment") - - for (region in allregions) { - for (type in alltypes) { - testthat::expect_no_error( - download_huc( - region, type, - directory_to_save, - acknowledgement = TRUE, - download = FALSE, - unzip = FALSE - ) - ) - commands_path <- paste0( - directory_to_save, - "USGS_NHD_", - region, - "_", - type, - "_", - Sys.Date(), - "_wget_commands.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 5) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - - } - } - - testthat::expect_error( - download_huc( - "Lower48", "OceanCatchment", - tempdir(), - acknowledgement = TRUE, - download = TRUE, - unzip = TRUE - ) - ) - unlink(directory_to_save, recursive = TRUE) - }) - - -testthat::test_that("download_sink test", { - dir <- paste0(tempdir(), "/sink/") - dir.create(dir, recursive = TRUE) - testfile <- paste0(dir, "sink_test.txt") - file.create(testfile) - 
testthat::expect_no_error( - download_sink(testfile) - ) - sink() - Sys.sleep(1.5) - file.remove(testfile) - unlink(dir, recursive = TRUE) -}) - -testthat::test_that("download_remove_zips test", { - dir <- paste0(tempdir(), "/yellowstone/") - testfile1 <- paste0(dir, "barren/coyote.zip") - dir.create(dirname(testfile1), recursive = TRUE) - file.create(testfile1, recursive = TRUE) - testfile2 <- paste0(dir, "retain/retain.txt") - dir.create(dirname(testfile2), recursive = TRUE) - file.create(testfile2, recursive = TRUE) - testthat::expect_no_error( - download_remove_zips(remove = TRUE, testfile1) - ) - # expect only the testfile1 directory to be removed - testthat::expect_equal( - length( - list.files( - dir, - recursive = TRUE, - include.dirs = TRUE - ) - ), - 2 - ) - unlink(paste0(dir, "/yellowstone")) -}) diff --git a/tests/testthat/test-download_olm.R b/tests/testthat/test-download_olm.R deleted file mode 100644 index b96a02d6..00000000 --- a/tests/testthat/test-download_olm.R +++ /dev/null @@ -1,49 +0,0 @@ - -testthat::test_that( - "Download OpenLandMap using STAC", - { - withr::local_package("rstac") - links <- - readRDS( - system.file("extdata", "openlandmap_assets.rds", package = "amadeus") - ) - product <- "no2_s5p.l3.trop.tmwm" - format <- "p50_p90_2km*.*tif" - directory_to_save <- paste0(tempdir(), "/olm") - acknowledgement <- TRUE - download <- FALSE - - testthat::expect_no_error( - download_olm( - product = product, - format = format, - directory_to_save = directory_to_save, - acknowledgement = acknowledgement, - download = download, - remove_command = FALSE - ) - ) - - commands_path <- paste0( - directory_to_save, - "/OLM_queried_", - product, - "_", - Sys.Date(), - "_wget_commands.txt" - ) - # import commands - commands <- read_commands(commands_path = commands_path) - # extract urls - urls <- extract_urls(commands = commands, position = 5) - # check HTTP URL status - url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") - # implement unit 
tests - test_download_functions(directory_to_save = directory_to_save, - commands_path = commands_path, - url_status = url_status) - # remove file with commands after test - file.remove(commands_path) - unlink(directory_to_save, recursive = TRUE) - } -) diff --git a/tests/testthat/test-dummies.R b/tests/testthat/test-dummies.R new file mode 100644 index 00000000..dbe0b84d --- /dev/null +++ b/tests/testthat/test-dummies.R @@ -0,0 +1,71 @@ +################################################################################ +##### unit and integration tests for Temporal Dummy functions + +################################################################################ +##### calc_temporal_dummies +testthat::test_that("calc_temporal_dummies (no errors)", { + + site_faux <- + data.frame( + site_id = "37031000188101", + lon = -78.90, + lat = 35.97, + time = as.POSIXlt("2022-01-01") + ) + + testthat::expect_no_error( + dum_res <- calc_temporal_dummies( + locs = site_faux, + year = seq(2018L, 2022L) + ) + ) + + # the result is a data frame + testthat::expect_s3_class(dum_res, "data.frame") + # ncol is equal to 12 + 5 + 7 + 4 + testthat::expect_equal(ncol(dum_res), 28L) + # should have each of the indicator groups + testthat::expect_equal(sum(unlist(dum_res[, -1:-4])), 3L) + + # with geometry + testthat::expect_no_error( + dum_res_geom <- calc_temporal_dummies( + locs = site_faux, + year = seq(2018L, 2022L), + geom = TRUE + ) + ) + testthat::expect_s4_class(dum_res_geom, "SpatVector") + + # error cases + site_faux_err <- site_faux + colnames(site_faux_err)[4] <- "date" + testthat::expect_error( + dum_res <- calc_temporal_dummies( + locs = site_faux_err + ) + ) + + testthat::expect_error( + dum_res <- calc_temporal_dummies( + locs = as.matrix(site_faux_err) + ) + ) + +}) + +testthat::test_that("calc_temporal_dummies (expected errors)", { + withr::local_package("terra") + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + 
testthat::expect_error( + calc_temporal_dummies( + ncp + ) + ) + testthat::expect_error( + calc_temporal_dummies( + terra::vect(ncp) + ) + ) +}) diff --git a/tests/testthat/test-ecoregion.R b/tests/testthat/test-ecoregion.R new file mode 100644 index 00000000..cd2b3f6d --- /dev/null +++ b/tests/testthat/test-ecoregion.R @@ -0,0 +1,225 @@ +################################################################################ +##### unit and integration tests for U.S. EPA Ecoregion functions +# nolint start + +################################################################################ +##### download_ecoregion +testthat::test_that("download_ecoregion", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + directory_to_save <- paste0(tempdir(), "/eco/") + certificate <- system.file("extdata/cacert_gaftp_epa.pem", + package = "amadeus") + # run download function + download_data(dataset_name = "ecoregion", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + unzip = FALSE, + remove_zip = FALSE, + download = FALSE, + remove_command = FALSE, + epa_certificate_path = certificate) + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # define file path with commands + commands_path <- paste0( + download_sanitize_path(directory_to_save), + "us_eco_l3_state_boundaries_", + Sys.Date(), + "_wget_command.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 3) + # check HTTP URL status + url_status <- + httr::HEAD(urls, config = httr::config(cainfo = certificate)) + url_status <- url_status$status_code + # implement unit tets + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + + file.create( + file.path(directory_to_save, "zip_files", + 
"us_eco_l3_state_boundaries.zip"), + recursive = TRUE + ) + testthat::expect_no_error( + download_data( + dataset_name = "ecoregion", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + unzip = FALSE, + remove_zip = TRUE, + download = FALSE, + remove_command = TRUE, + epa_certificate_path = certificate + ) + ) + testthat::expect_true( + dir.exists(paste0(directory_to_save, "/data_files")) + ) + testthat::expect_equal( + length( + list.files( + directory_to_save, recursive = TRUE, include.dirs = TRUE + ) + ), + 1 + ) + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_ecoregion (expected errors)", { + # expected errors due to invalid certificate + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + tdir <- tempdir(check = TRUE) + directory_to_save <- paste0(tempdir(), "/epa/") + certificate <- file.path(tdir, "cacert_gaftp_epa.pem") + # remove if there is a preexisting file + if (file.exists(certificate)) { + file.remove(certificate) + file.remove(gsub("pem", "crt", certificate)) + } + + # run download function + + testthat::expect_message( + download_data(dataset_name = "ecoregion", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE, + unzip = FALSE, + remove_zip = FALSE, + epa_certificate_path = certificate + ) + ) + # unlink dir + unlink(tdir) + + # define file path with commands + commands_path <- paste0( + directory_to_save, + "us_eco_l3_state_boundaries_", + Sys.Date(), + "_wget_command.txt" + ) + + # remove file with commands after test + testthat::expect_true(file.exists(commands_path)) + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_ecoregion +testthat::test_that("process_ecoregion", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_options(list(sf_use_s2 = 
FALSE)) + + path_eco <- testthat::test_path("..", "testdata", "eco_l3_clip.gpkg") + testthat::expect_no_error( + eco <- process_ecoregion(path_eco) + ) + + # test with cropping extent + testthat::expect_no_error( + process_ecoregion(path_eco, extent = terra::ext(eco)) + ) + ecotemp <- sf::st_read(path_eco) + # nolint start + addpoly <- + "POLYGON ((-70.2681 43.6787, -70.252234 43.677145, -70.251036 -43.680758, -70.268666 43.681505, -70.2681 43.6787))" + # nolint end + addpoly <- sf::st_as_sfc(addpoly, crs = "EPSG:4326") + addpoly <- sf::st_transform(addpoly, sf::st_crs(ecotemp)) + ecotemp[1, "geom"] <- addpoly + tdir <- tempdir() + sf::st_write(ecotemp, paste0(tdir, "/ecoregions.gpkg"), append = FALSE) + testthat::expect_no_error( + suppressWarnings(process_ecoregion(paste0(tdir, "/ecoregions.gpkg"))) + ) +}) + +################################################################################ +##### calc_ecoregion +testthat::test_that("calc_ecoregion", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_options(list(sf_use_s2 = FALSE)) + + ecol3 <- testthat::test_path("..", "testdata", "eco_l3_clip.gpkg") + site_faux <- + data.frame( + site_id = "37999109988101", + lon = -77.576, + lat = 39.40, + date = as.Date("2022-01-01") + ) + site_faux <- + terra::vect( + site_faux, + geom = c("lon", "lat"), + keepgeom = TRUE, + crs = "EPSG:4326") + site_faux <- terra::project(site_faux, "EPSG:5070") + + testthat::expect_no_error( + erras <- process_ecoregion(ecol3) + ) + + testthat::expect_no_error( + ecor_res <- calc_ecoregion( + from = erras, + locs = sf::st_as_sf(site_faux), + locs_id = "site_id" + ) + ) + + testthat::expect_no_error( + ecor_res <- calc_ecoregion( + from = erras, + locs = site_faux, + locs_id = "site_id" + ) + ) + + # the result is a data frame + testthat::expect_s3_class(ecor_res, "data.frame") + # ncol is equal to 2 + 5 + 2 + 1 + 1 + testthat::expect_equal(ncol(ecor_res), 4L) + # should have each of the indicator groups + dum_cn 
<- grep("DUM_", colnames(ecor_res)) + testthat::expect_equal( + sum(unlist(ecor_res[, dum_cn])), 2L + ) + + testthat::expect_no_error( + ecor_geom <- calc_ecoregion( + from = erras, + locs = site_faux, + locs_id = "site_id", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(ecor_geom), 4 + ) + testthat::expect_true( + "SpatVector" %in% class(ecor_geom) + ) +}) +# nolint end diff --git a/tests/testthat/test-geos.R b/tests/testthat/test-geos.R new file mode 100644 index 00000000..f11a75e6 --- /dev/null +++ b/tests/testthat/test-geos.R @@ -0,0 +1,221 @@ +################################################################################ +##### unit and integration tests for NASA GEOS-CF functions + +##### download_geos +testthat::test_that("download_geos", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + date_start <- "2019-09-09" + date_end <- "2019-09-09" + collections <- c("aqc_tavg_1hr_g1440x721_v1", + "chm_inst_1hr_g1440x721_p23") + directory_to_save <- paste0(tempdir(), "/geos/") + # run download function + testthat::expect_no_error( + download_data(dataset_name = "geos", + date = c(date_start, date_end), + collection = collections, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE) + ) + # define file path with commands + commands_path <- paste0(directory_to_save, + "geos_", + date_start, + "_", + date_end, + "_wget_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 2) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 2L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +##### process_geos 
+testthat::test_that("process_geos (no errors)", { + withr::local_package("terra") + collections <- c( + "a", + "c" + ) + # expect function + expect_true( + is.function(process_geos) + ) + for (c in seq_along(collections)) { + collection <- collections[c] + geos <- + process_geos( + date = c("2018-01-01", "2018-01-01"), + variable = "O3", + path = + testthat::test_path( + "..", + "testdata", + "geos", + collection + ) + ) + # expect output is SpatRaster + expect_true( + class(geos)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(geos) + ) + # expect non-null coordinate reference system + expect_false( + terra::crs(geos) == "" + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(geos)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(geos)) + ) + # expect time dimension is POSIXt for hourly + expect_true( + "POSIXt" %in% class(terra::time(geos)) + ) + # expect seconds in time information + expect_true( + "seconds" %in% terra::timeInfo(geos) + ) + # expect dimensions according to collection + if (collection == "a") { + expect_true( + dim(geos)[3] == 1 + ) + } else if (collection == "c") { + expect_true( + dim(geos)[3] == 5 + ) + } + } + # test with cropping extent + testthat::expect_no_error( + geos_ext <- process_geos( + date = c("2018-01-01", "2018-01-01"), + variable = "O3", + path = + testthat::test_path( + "..", + "testdata", + "geos", + "c" + ), + extent = terra::ext(geos) + ) + ) +}) + +testthat::test_that("process_geos (expected errors)", { + # expect error without variable + expect_error( + process_geos() + ) + # expect error on directory without data + expect_error( + process_geos( + variable = "O3", + path = "./" + ) + ) +}) + +##### calc_geos +testthat::test_that("calc_geos", { + withr::local_package("terra") + withr::local_package("data.table") + collections <- c( + "a", + "c" + ) + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 
35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_geos) + ) + for (c in seq_along(collections)) { + collection <- collections[c] + for (r in seq_along(radii)) { + geos <- + process_geos( + date = c("2018-01-01", "2018-01-01"), + variable = "O3", + path = + testthat::test_path( + "..", + "testdata", + "geos", + collection + ) + ) + geos_covariate <- + calc_geos( + from = geos, + locs = data.table::data.table(ncp), + locs_id = "site_id", + radius = radii[r], + fun = "mean" + ) + # set column names + geos_covariate <- calc_setcolumns( + from = geos_covariate, + lag = 0, + dataset = "geos", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(geos_covariate) == "data.frame" + ) + # expect 4 columns + expect_true( + ncol(geos_covariate) == 4 + ) + # expect numeric value + expect_true( + class(geos_covariate[, 4]) == "numeric" + ) + # expect $time is class POSIXt + expect_true( + "POSIXt" %in% class(geos_covariate$time) + ) + } + } + # with included geometry + testthat::expect_no_error( + geos_covariate_geom <- calc_geos( + from = geos, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(geos_covariate_geom), 4 + ) + testthat::expect_true( + "SpatVector" %in% class(geos_covariate_geom) + ) +}) diff --git a/tests/testthat/test-gmted.R b/tests/testthat/test-gmted.R new file mode 100644 index 00000000..16a9aa61 --- /dev/null +++ b/tests/testthat/test-gmted.R @@ -0,0 +1,286 @@ +################################################################################ +##### unit and integration tests for USGS GMTED functions + +##### download_gmted +testthat::test_that("download_gmted", { + withr::local_package("httr") + # function parameters + statistics <- c("Breakline Emphasis", + "Standard Deviation Statistic") + resolution <- "7.5 arc-seconds" + directory_to_save <- paste0(tempdir(), "/gmted/") + for (s in seq_along(statistics)) { + # 
run download function + download_data(dataset_name = "gmted", + statistic = statistics[s], + resolution = resolution, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + unzip = FALSE, + remove_zip = FALSE, + download = FALSE) + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # define file path with commands + commands_path <- paste0(download_sanitize_path(directory_to_save), + "gmted_", + gsub(" ", "", statistics[s]), + "_", + gsub(" ", "", resolution), + "_", + Sys.Date(), + "_curl_command.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 6) + filename <- extract_urls(commands = commands, position = 4) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + + file.create( + file.path(filename), + recursive = TRUE + ) + file.create( + file.path( + paste0(directory_to_save, "/data_files/test.txt") + ) + ) + # remove file with commands after test + # remove temporary gmted + testthat::expect_no_error( + download_data(dataset_name = "gmted", + statistic = statistics[s], + resolution = resolution, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + unzip = FALSE, + remove_zip = TRUE, + remove_command = TRUE, + download = FALSE) + ) + testthat::expect_true( + dir.exists(paste0(directory_to_save, "/data_files")) + ) + testthat::expect_equal( + length( + list.files( + directory_to_save, recursive = TRUE, include.dirs = TRUE + ) + ), + 2 + ) + unlink(directory_to_save, recursive = TRUE) + } +}) + +##### process_gmted +# test GMTED #### +testthat::test_that("process_gmted (no errors)", { + withr::local_package("terra") + statistics <- c( + "Breakline 
Emphasis", "Systematic Subsample" + ) + resolutions <- c( + "7.5 arc-seconds", "15 arc-seconds", "30 arc-seconds" + ) + # expect function + expect_true( + is.function(process_gmted) + ) + for (s in seq_along(statistics)) { + statistic <- statistics[s] + for (r in seq_along(resolutions)) { + resolution <- resolutions[r] + gmted <- + process_gmted( + variable = c(statistic, resolution), + path = + testthat::test_path( + "..", + "testdata", + "gmted", + paste0( + process_gmted_codes( + statistic, + statistic = TRUE, + invert = FALSE + ), + process_gmted_codes( + resolution, + resolution = TRUE, + invert = FALSE + ), + "_grd" + ) + ) + ) + # expect output is a SpatRaster + expect_true( + class(gmted)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(gmted) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(gmted)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(gmted)[1:2]) + ) + } + } + # test with cropping extent + testthat::expect_no_error( + gmted_ext <- + process_gmted( + variable = c("Breakline Emphasis", "7.5 arc-seconds"), + path = + testthat::test_path( + "..", + "testdata", + "gmted", + "be75_grd" + ), + ext = terra::ext(gmted) + ) + ) +}) + +testthat::test_that("process_gmted (expected errors)", { + # expect errors due to non-vector variable + expect_error( + gmted <- + process_gmted( + variable <- "Breakline Emphasis; 7.5 arc-seconds", + path = testthat::test_path( + "..", + "testdata", + "gmted" + ) + ) + ) +}) + +testthat::test_that("process_gmted_codes (auxiliary)", { + teststring <- "mx" + testthat::expect_no_error( + statorig <- process_gmted_codes( + teststring, + statistic = TRUE, + resolution = FALSE, + invert = TRUE + ) + ) + testthat::expect_equal(statorig, "Maximum Statistic") + + teststring <- "75" + testthat::expect_no_error( + resoorig <- process_gmted_codes( + teststring, + statistic = FALSE, + resolution = TRUE, + invert = TRUE + ) + ) + 
testthat::expect_equal(resoorig, "7.5 arc-seconds") +}) + +##### calc_gmted +testthat::test_that("calc_gmted", { + withr::local_package("terra") + statistics <- c( + "Breakline Emphasis", "Systematic Subsample" + ) + resolutions <- c( + "7.5 arc-seconds", "15 arc-seconds", "30 arc-seconds" + ) + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_gmted) + ) + for (s in seq_along(statistics)) { + statistic <- statistics[s] + for (r in seq_along(resolutions)) { + resolution <- resolutions[r] + for (a in seq_along(radii)) { + gmted <- + process_gmted( + variable = c(statistic, resolution), + path = + testthat::test_path( + "..", + "testdata", + "gmted" + ) + ) + gmted_covariate <- + calc_gmted( + from = gmted, + locs = ncp, + locs_id = "site_id", + radius = radii[a], + fun = "mean" + ) + # set column names + gmted_covariate <- calc_setcolumns( + from = gmted_covariate, + lag = 0, + dataset = "gmted", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(gmted_covariate) == "data.frame" + ) + # expect 2 columns + expect_true( + ncol(gmted_covariate) == 3 + ) + # expect numeric value + expect_true( + class(gmted_covariate[, 3]) == "numeric" + ) + } + } + } + testthat::expect_no_error( + gmted <- process_gmted( + variable = c("Breakline Emphasis", "7.5 arc-seconds"), + testthat::test_path( + "..", "testdata", "gmted", "be75_grd" + ) + ) + ) + testthat::expect_no_error( + gmted_geom <- calc_gmted( + gmted, + ncp, + "site_id", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(gmted_geom), 3 + ) + testthat::expect_true( + "SpatVector" %in% class(gmted_geom) + ) +}) diff --git a/tests/testthat/test-gridmet.R b/tests/testthat/test-gridmet.R new file mode 100644 index 00000000..1a8e6683 --- /dev/null +++ b/tests/testthat/test-gridmet.R @@ -0,0 +1,221 @@ +################################################################################ 
+##### unit and integration tests for Climatology Group Gridmet functions + +################################################################################ +##### download_gridmet +testthat::test_that("download_gridmet (no errors)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + year_start <- 2018 + year_end <- 2023 + variables <- "Precipitation" + directory_to_save <- paste0(tempdir(), "/gridmet/") + # run download function + download_data(dataset_name = "gridmet", + year = c(year_start, year_end), + variables = variables, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE) + # define path with commands + commands_path <- paste0(directory_to_save, + "/gridmet_", + year_start, "_", year_end, + "_curl_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 6) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 5L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_gridmet (expected errors - invalid years)", { + testthat::expect_error( + download_data( + dataset_name = "gridmet", + variables = "Precipitation", + year = c(10, 11), + acknowledgement = TRUE, + directory_to_save = paste0(tempdir(), "/g/") + ) + ) +}) + +testthat::test_that("download_gridmet (expected errors - invalid variables)", { + testthat::expect_error( + download_data( + dataset_name = "gridmet", + variables = "temp", + year = c(2018, 2018), + acknowledgement = TRUE, + directory_to_save = paste0(tempdir(), "/g/") + ) + ) +}) + +################################################################################ +##### 
process_gridmet +testthat::test_that("process_gridmet", { + withr::local_package("terra") + variable <- "Precipitation" + # expect function + expect_true( + is.function(process_gridmet) + ) + gridmet <- + process_gridmet( + date = c("2018-01-03", "2018-01-03"), + variable = variable, + path = + testthat::test_path( + "..", + "testdata", + "gridmet", + "pr" + ) + ) + # expect output is SpatRaster + expect_true( + class(gridmet)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(gridmet) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(gridmet)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(gridmet)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(gridmet)) + ) + # expect dimensions according to levels + expect_true( + dim(gridmet)[3] == 1 + ) + # test with cropping extent + testthat::expect_no_error( + gridmet_ext <- process_gridmet( + date = c("2018-01-03", "2018-01-03"), + variable = "Precipitation", + path = + testthat::test_path( + "..", + "testdata", + "gridmet", + "pr" + ), + extent = terra::ext(gridmet) + ) + ) +}) + +testthat::test_that("process_gridmet_codes", { + # gridmet + gc1 <- process_gridmet_codes("all") + expect_true(ncol(gc1) == 2) + gc2 <- process_gridmet_codes("sph", invert = TRUE) + expect_true(class(gc2) == "character") + expect_true(nchar(gc2) > 7) + gc3 <- process_gridmet_codes("Near-Surface Specific Humidity") + expect_true(class(gc3) == "character") + expect_true(nchar(gc3) < 7) + # process_variable_codes + expect_no_error(process_variable_codes("sph", "gridmet")) + expect_no_error( + process_variable_codes("Near-Surface Specific Humidity", "gridmet") + ) + expect_error( + process_variable_codes("error", "gridmet") + ) +}) + +################################################################################ +##### calc_gridmet +testthat::test_that("calc_gridmet", { + 
withr::local_package("terra") + withr::local_package("data.table") + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_gridmet) + ) + for (r in seq_along(radii)) { + gridmet <- + process_gridmet( + date = c("2018-01-03", "2018-01-03"), + variable = "pr", + path = + testthat::test_path( + "..", + "testdata", + "gridmet", + "pr" + ) + ) + gridmet_covariate <- + calc_gridmet( + from = gridmet, + locs = data.table::data.table(ncp), + locs_id = "site_id", + radius = radii[r], + fun = "mean" + ) + # set column names + gridmet_covariate <- calc_setcolumns( + from = gridmet_covariate, + lag = 0, + dataset = "gridmet", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(gridmet_covariate) == "data.frame" + ) + # expect 3 columns + expect_true( + ncol(gridmet_covariate) == 3 + ) + # expect numeric value + expect_true( + class(gridmet_covariate[, 3]) == "numeric" + ) + # expect $time is class Date + expect_true( + "POSIXt" %in% class(gridmet_covariate$time) + ) + } + # with included geometry + testthat::expect_no_error( + gridmet_covariate_geom <- calc_gridmet( + from = gridmet, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(gridmet_covariate_geom), 3 + ) + testthat::expect_true( + "SpatVector" %in% class(gridmet_covariate_geom) + ) +}) diff --git a/tests/testthat/test-groads.R b/tests/testthat/test-groads.R new file mode 100644 index 00000000..a0bd9df5 --- /dev/null +++ b/tests/testthat/test-groads.R @@ -0,0 +1,152 @@ +################################################################################ +##### unit and integration tests for NASA SEDAC gRoads functions + +################################################################################ +##### download_sedac_groads +testthat::test_that("download_sedac_groads", { + withr::local_package("httr") + 
withr::local_package("stringr") + # function parameters + data_regions <- c("Americas", "Global") + data_formats <- c("Geodatabase", "Shapefile") + directory_to_save <- paste0(tempdir(), "/groads/") + # run download function + for (r in seq_along(data_regions)) { + data_region <- data_regions[r] + for (f in seq_along(data_formats)) { + download_data(dataset_name = "sedac_groads", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + data_format = data_formats[f], + data_region = data_region, + download = FALSE, + unzip = FALSE, + remove_zip = FALSE, + remove_command = FALSE) + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # define file path with commands + commands_path <- paste0(download_sanitize_path(directory_to_save), + "sedac_groads_", + gsub(" ", "_", tolower(data_region)), + "_", + Sys.Date(), + "_curl_command.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 11) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "GET") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + } + } + + testthat::expect_message( + download_data(dataset_name = "sedac_groads", + data_format = "Shapefile", + data_region = "Global", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + unzip = FALSE, + remove_zip = FALSE, + remove_command = TRUE) + ) + # remove temporary groads + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_sedac_groads +testthat::test_that("process_sedac_groads", { + withr::local_package("terra") + + # main test 
+ testthat::expect_no_error( + groads <- process_sedac_groads( + path = testthat::test_path("../testdata/groads_test.shp") + ) + ) + # expect + testthat::expect_s4_class(groads, "SpatVector") + # error cases + testthat::expect_error( + process_sedac_groads(path = 1L) + ) + # test with cropping extent + testthat::expect_no_error( + groads_ext <- process_sedac_groads( + path = testthat::test_path("../testdata/groads_test.shp"), + extent = terra::ext(groads) + ) + ) +}) + +################################################################################ +##### calc_sedac_groads +testthat::test_that("calc_groads", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_options(list(sf_use_s2 = FALSE)) + + # test data generation + ncp <- data.frame( + site_id = c("1", "2"), + lon = c(-78.899, -78.643669), + lat = c(35.8774, 35.785342), + time = c(2022, 2022) + ) + # ncp <- terra::vect(ncp, keepgeom = TRUE, crs = "EPSG:4326") + path_groads <- testthat::test_path("..", "testdata", "groads_test.shp") + groads <- terra::vect(path_groads) + + testthat::expect_no_error( + groads_res <- calc_sedac_groads( + from = groads, + locs = ncp, + locs_id = "site_id", + radius = 5000 + ) + ) + + testthat::expect_error( + calc_sedac_groads( + from = groads, + locs = ncp, + locs_id = "site_id", + radius = 0 + ) + ) + + # expect data.frame + testthat::expect_s3_class(groads_res, "data.frame") + + # return with geometry + testthat::expect_no_error( + groads_geom <- calc_sedac_groads( + from = groads, + locs = ncp, + locs_id = "site_id", + radius = 5000, + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(groads_geom), 4 + ) + testthat::expect_true( + "SpatVector" %in% class(groads_geom) + ) +}) diff --git a/tests/testthat/test-hms.R b/tests/testthat/test-hms.R new file mode 100644 index 00000000..7d535b08 --- /dev/null +++ b/tests/testthat/test-hms.R @@ -0,0 +1,304 @@ +################################################################################ +##### unit and 
integration tests for NOAA HMS functions +# nolint start + +################################################################################ +##### download_hms +testthat::test_that("download_hms (no errors)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + date_start <- "2022-08-12" + date_end <- "2022-09-21" + directory_to_save <- paste0(tempdir(), "/hms/") + data_formats <- c("Shapefile", "KML") + for (d in seq_along(data_formats)) { + # run download function + download_data(dataset_name = "smoke", + date = c(date_start, date_end), + data_format = data_formats[d], + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE, + unzip = FALSE, + remove_zip = FALSE) + # define file path with commands + commands_path <- paste0(download_sanitize_path(directory_to_save), + "hms_smoke_", + gsub("-", "", date_start), + "_", + gsub("-", "", date_end), + "_curl_commands.txt") + # expect sub-directories to be created + if (data_formats[d] == "Shapefile") { + expected_folders <- 3 + } else { + expected_folders <- 2 + } + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == expected_folders + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 6) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 10L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + # remove temporary hms + unlink(directory_to_save, recursive = TRUE) + } +}) + +testthat::test_that("download_hms (expected errors)", { + error_directory <- paste0(tempdir(), "/error/") + testthat::expect_error( + download_data( + dataset_name = "hms", + acknowledgement = TRUE, + 
directory_to_save = error_directory, + unzip = FALSE, + remove_zip = TRUE + ) + ) + unlink(error_directory, recursive = TRUE) +}) + +testthat::test_that("download_hms (live)", { + # function parameters + date <- "2018-01-01" + directory <- paste0(tempdir(), "/hms/") + # run download function + download_data( + dataset_name = "hms", + date = c(date, date), + directory_to_save = directory, + acknowledgement = TRUE, + download = TRUE, + unzip = TRUE, + remove_zip = FALSE, + remove_command = FALSE + ) + Sys.sleep(1.5) + testthat::expect_true( + length(list.files(directory, recursive = TRUE, include.dirs = TRUE)) == 8 + ) + commands <- list.files(directory, pattern = ".txt", full.names = TRUE) + testthat::expect_true( + file.exists(commands) + ) + # remove directory + unlink(directory, recursive = TRUE) +}) + +################################################################################ +##### process_hms +testthat::test_that("process_hms (with polygons)", { + withr::local_package("terra") + # expect function + testthat::expect_true( + is.function(process_hms) + ) + hms <- + process_hms( + date = c("2022-06-10", "2022-06-13"), + path = testthat::test_path( + "..", + "testdata", + "hms" + ) + ) + # expect output is a SpatVector or character + testthat::expect_true( + methods::is(hms, "SpatVector") + ) + # expect non-null coordinate reference system + testthat::expect_false( + is.null(terra::crs(hms)) + ) + # expect two columns + testthat::expect_true( + ncol(hms) == 2 + ) + # expect density and date column + testthat::expect_true( + all(c("Density", "Date") %in% names(hms)) + ) + # test with cropping extent + testthat::expect_no_error( + hms_ext <- process_hms( + date = c("2022-06-10", "2022-06-11"), + path = testthat::test_path( + "..", + "testdata", + "hms" + ), + extent = terra::ext(hms) + ) + ) +}) + +testthat::test_that("process_hms (absent polygons - 12/31/2018)", { + withr::local_package("terra") + # expect function + testthat::expect_true( + 
is.function(process_hms) + ) + hms <- + process_hms( + date = c("2018-12-31", "2018-12-31"), + path = testthat::test_path( + "..", + "testdata", + "hms" + ) + ) + # expect character + testthat::expect_true(is.character(hms)) +}) + +################################################################################ +##### calc_hms +testthat::test_that("calc_hms (no errors)", { + withr::local_package("terra") + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_hms) + ) + for (r in seq_along(radii)) { + hms <- + process_hms( + date = c("2022-06-10", "2022-06-11"), + path = testthat::test_path( + "..", + "testdata", + "hms" + ) + ) + hms_covariate <- + calc_hms( + from = hms, + locs = ncp, + locs_id = "site_id", + radius = radii[r], + geom = FALSE + ) + # set column names + hms_covariate <- calc_setcolumns( + from = hms_covariate, + lag = 0, + dataset = "hms", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(hms_covariate) == "data.frame" + ) + # expect 3 columns + expect_true( + ncol(hms_covariate) == 5 + ) + # expect 2 rows + expect_true( + nrow(hms_covariate) == 2 + ) + # expect integer for binary value + expect_true( + is.integer(hms_covariate[, 3]) + ) + # expect binary + expect_true( + all(unique(hms_covariate[, 3]) %in% c(0, 1)) + ) + } +}) + +testthat::test_that("calc_hms (with geometry)", { + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + hms_dir <- testthat::test_path( + "..", "testdata", "hms" + ) + hms <- process_hms( + date = c("2022-06-10", "2022-06-13"), + path = hms_dir + ) + hms_covariate_geom <- calc_hms( + from = hms, + locs = ncp, + locs_id = "site_id", + radius = 0, + geom = TRUE + ) + # with geometry will have 5 columns + testthat::expect_equal( + ncol(hms_covariate_geom), 5 + ) + testthat::expect_s4_class( + hms_covariate_geom, "SpatVector" + ) +}) + 
+testthat::test_that("calc_hms (absent polygons - 12/31/2018)", { + withr::local_package("terra") + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_hms) + ) + # expect function + testthat::expect_true( + is.function(process_hms) + ) + hms <- + process_hms( + date = c("2018-12-31", "2018-12-31"), + path = testthat::test_path( + "..", + "testdata", + "hms" + ) + ) + for (r in seq_along(radii)) { + hms_covar <- calc_hms( + from = hms, + locs = ncp, + locs_id = "site_id", + radius = radii[r], + geom = FALSE + ) + # data frame + testthat::expect_true(methods::is(hms_covar, "data.frame")) + # 7 columns + testthat::expect_equal(ncol(hms_covar), 7) + } + for (r in seq_along(radii)) { + hms_covar <- calc_hms( + from = hms, + locs = ncp, + locs_id = "site_id", + radius = radii[r], + geom = TRUE + ) + # SpatVector + testthat::expect_true(methods::is(hms_covar, "SpatVector")) + # 5 columns + testthat::expect_equal(ncol(hms_covar), 5) + } +}) +# nolint end diff --git a/tests/testthat/test-huc.R b/tests/testthat/test-huc.R new file mode 100644 index 00000000..0c878548 --- /dev/null +++ b/tests/testthat/test-huc.R @@ -0,0 +1,124 @@ +################################################################################ +##### unit and integration tests for USGS NHD functions + +################################################################################ +##### download_huc +testthat::test_that("download_huc", { + withr::local_package("httr") + directory_to_save <- paste0(tempdir(), "/huc/") + allregions <- c("Lower48", "Islands") + alltypes <- c("Seamless", "OceanCatchment") + + for (region in allregions) { + for (type in alltypes) { + testthat::expect_no_error( + download_huc( + region, type, + directory_to_save, + acknowledgement = TRUE, + download = FALSE, + unzip = FALSE + ) + ) + commands_path <- paste0( + directory_to_save, + "USGS_NHD_", + region, + "_", + type, + 
"_", + Sys.Date(), + "_wget_commands.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 5) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + } + } + testthat::expect_error( + download_huc( + "Lower48", "OceanCatchment", + tempdir(), + acknowledgement = TRUE, + download = TRUE, + unzip = TRUE + ) + ) + unlink(directory_to_save, recursive = TRUE) +}) + + +################################################################################ +##### process_huc +testthat::test_that("process_huc", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_package("nhdplusTools") + withr::local_options(list(sf_use_s2 = FALSE)) + # Set up test data + path <- testthat::test_path( + "..", "testdata", "huc12", "NHDPlus_test.gpkg" + ) + + # Call the function + testthat::expect_error(process_huc(path)) + testthat::expect_no_error( + result <- + process_huc( + path, + layer_name = "NHDPlus_test", + huc_level = "HUC_12", + huc_header = "030202" + ) + ) + testthat::expect_true(inherits(result, "SpatVector")) + + # query case + testthat::expect_no_error( + result <- + process_huc( + path, + layer_name = "NHDPlus_test", + huc_level = "HUC_12", + huc_header = "030202" + ) + ) + testthat::expect_true(inherits(result, "SpatVector")) + + testthat::expect_error( + process_huc( + path, + layer_name = "HUc", + huc_level = "HUC_12", + huc_header = "030202" + ) + ) + + # Set up test data + path2 <- testthat::test_path( + "..", "testdata", "huc12" + ) + + # Call the function and expect an error + testthat::expect_error(process_huc(path2)) + + # test with cropping extent + testthat::expect_no_error( + huc_ext <- 
process_huc( + path, + layer_name = "NHDPlus_test", + huc_level = "HUC_12", + huc_header = "030202", + extent = terra::ext(result) + ) + ) +}) diff --git a/tests/testthat/test-koppen-geiger.R b/tests/testthat/test-koppen-geiger.R new file mode 100644 index 00000000..3b580acd --- /dev/null +++ b/tests/testthat/test-koppen-geiger.R @@ -0,0 +1,139 @@ +################################################################################ +##### unit and integration tests for Koppen Geiger functions + +################################################################################ +##### download_koppen_geiger +testthat::test_that("download_koppen_geiger", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + time_periods <- c("Present", "Future") + data_resolutions <- c("0.0083") + directory_to_save <- paste0(tempdir(), "/kop/") + # run download function + for (p in seq_along(time_periods)) { + time_period <- time_periods[p] + for (d in seq_along(data_resolutions)) { + download_data(dataset_name = "koppen", + time_period = time_period, + data_resolution = data_resolutions[d], + directory_to_save = directory_to_save, + acknowledgement = TRUE, + unzip = FALSE, + remove_zip = FALSE, + download = FALSE, + remove_command = FALSE) + # define file path with commands + commands_path <- paste0(download_sanitize_path(directory_to_save), + "koppen_geiger_", + time_period, + "_", + gsub("\\.", + "p", + data_resolutions[d]), + "_", + Sys.Date(), + "_wget_command.txt") + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 2) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + 
commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + } + } + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_koppen_geiger +testthat::test_that("process_koppen_geiger", { + withr::local_package("terra") + path_kgeiger <- + testthat::test_path("../testdata", "koppen_subset.tif") + + testthat::expect_no_error( + kgeiger <- process_koppen_geiger(path_kgeiger) + ) + + # test with cropping extent + testthat::expect_no_error( + kgeiger_ext <- process_koppen_geiger( + path_kgeiger, + extent = terra::ext(kgeiger) + ) + ) + testthat::expect_s4_class(kgeiger, "SpatRaster") + + path_kgeiger_f <- + testthat::test_path("../testdata", "kop", "Beck_KG_V1_future_0p5.tif") + testthat::expect_no_error( + kgeiger_f <- process_koppen_geiger(path_kgeiger_f) + ) +}) + +################################################################################ +##### calc_koppen_geiger +testthat::test_that("calc_koppen_geiger", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_options( + list(sf_use_s2 = FALSE) + ) + + site_faux <- + data.frame( + site_id = "37031000188101", + lon = -78.90, + lat = 35.97 + ) + site_faux <- terra::vect(site_faux, crs = "EPSG:4326", keepgeom = TRUE) + kp_path <- testthat::test_path("..", "testdata", "koppen_subset.tif") + + testthat::expect_no_error( + kgras <- process_koppen_geiger(path = kp_path) + ) + + testthat::expect_no_error( + kg_res <- calc_koppen_geiger( + from = kgras, + locs = site_faux + ) + ) + testthat::expect_no_error( + kg_res <- calc_koppen_geiger( + from = kgras, + locs = sf::st_as_sf(site_faux) + ) + ) + # the result is a data frame + testthat::expect_s3_class(kg_res, "data.frame") + # ncol is equal to 7 + testthat::expect_equal(ncol(kg_res), 7) + # should have only one climate zone + testthat::expect_equal(sum(unlist(kg_res[, c(-1, -2)])), 1) + # 
with included geometry + testthat::expect_no_error( + kg_geom <- calc_koppen_geiger( + from = kgras, + locs = sf::st_as_sf(site_faux), + geom = TRUE + ) + ) + testthat::expect_equal(ncol(kg_geom), 7) + testthat::expect_true("SpatVector" %in% class(kg_geom)) +}) diff --git a/tests/testthat/test-merra2.R b/tests/testthat/test-merra2.R new file mode 100644 index 00000000..ba21c140 --- /dev/null +++ b/tests/testthat/test-merra2.R @@ -0,0 +1,243 @@ +################################################################################ +##### unit and integration tests for NASA MERRA2 functions + +##### download_merra2 +testthat::test_that("download_merra2 (no errors)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + date_start <- "2022-02-14" + date_end <- "2022-03-08" + collections <- c("inst1_2d_asm_Nx", "inst3_3d_asm_Np") + directory_to_save <- paste0(tempdir(), "/merra2/") + # run download function + testthat::expect_no_error( + download_data(dataset_name = "merra2", + date = c(date_start, date_end), + collection = collections, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE) + ) + # define path with commands + commands_path <- paste0(directory_to_save, + "merra2_", + date_start, + "_", + date_end, + "_wget_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 2) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 3L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_merra2 (expected errors)", { + # expected error due to unrecognized collection + # function parameters + collections <- 
"uNrEcOgNiZeD" + directory_to_save <- testthat::test_path("..", "testdata/", "") + testthat::expect_error( + download_data( + dataset_name = "merra", + collection = collections, + directory_to_save = directory_to_save, + acknowledgement = TRUE + ) + ) +}) + +##### process_merra2 +testthat::test_that("process_merra2", { + withr::local_package("terra") + #* indicates three dimensional data that has subset to single + #* pressure level for test data set + collection <- c( + "inst1_2d_int_Nx", "inst3_2d_gas_Nx", "inst3_3d_chm_Nv", #* + "inst6_3d_ana_Np", #* + "statD_2d_slv_Nx", "tavg1_2d_chm_Nx", "tavg3_3d_udt_Np" #* + ) + variable <- c( + "CPT", "AODANA", "AIRDENS", #* + "SLP", #* + "HOURNORAIN", "COCL", "DUDTANA" #* + ) + merra2_df <- data.frame(collection, variable) + # expect function + expect_true( + is.function(process_merra2) + ) + for (c in seq_along(merra2_df$collection)) { + merra2 <- + process_merra2( + date = c("2018-01-01", "2018-01-01"), + variable = merra2_df$variable[c], + path = + testthat::test_path( + "..", + "testdata", + "merra2", + merra2_df$collection[c] + ) + ) + # expect output is SpatRaster + expect_true( + class(merra2)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(merra2) + ) + # expect non-null coordinate reference system + expect_false( + terra::crs(merra2) == "" + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(merra2)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(merra2)) + ) + # expect time dimension is POSIXt for hourly + expect_true( + "POSIXt" %in% class(terra::time(merra2)) + ) + # expect seconds in time information + expect_true( + "seconds" %in% terra::timeInfo(merra2) + ) + # expect test data subset dimensions (2 x 3 x 1) + expect_true( + all(dim(merra2) == c(2, 3, 1)) + ) + } + class(merra2) + # test with cropping extent + testthat::expect_no_error( + merra2_ext <- process_merra2( + date = c("2018-01-01", "2018-01-01"), + 
variable = "CPT", + path = + testthat::test_path( + "..", + "testdata", + "merra2", + "inst1_2d_int_Nx" + ), + extent = terra::ext(merra2) + ) + ) +}) + +##### calc_merra2 +testthat::test_that("calc_merra2", { + withr::local_package("terra") + withr::local_package("data.table") + #* indicates three dimensional data that has subset to single + #* pressure level for test data set + collections <- c( + "inst1_2d_int_Nx", "inst3_2d_gas_Nx", "inst3_3d_chm_Nv", #* + "inst6_3d_ana_Np", #* + "statD_2d_slv_Nx", "tavg1_2d_chm_Nx", "tavg3_3d_udt_Np" #* + ) + variables <- c( + "CPT", "AODANA", "AIRDENS", #* + "SLP", #* + "HOURNORAIN", "COCL", "DUDTANA" #* + ) + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_merra2) + ) + for (c in seq_along(collections)) { + collection <- collections[c] + variable <- variables[c] + for (r in seq_along(radii)) { + merra2 <- + process_merra2( + date = c("2018-01-01", "2018-01-01"), + variable = variable, + path = + testthat::test_path( + "..", + "testdata", + "merra2", + collection + ) + ) + merra2_covariate <- + calc_merra2( + from = merra2, + locs = data.table::data.table(ncp), + locs_id = "site_id", + radius = radii[r], + fun = "mean" + ) + # set column names + merra2_covariate <- calc_setcolumns( + from = merra2_covariate, + lag = 0, + dataset = "merra2", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(merra2_covariate) == "data.frame" + ) + if (grepl("lev", names(merra2)[1])) { + # expect 4 columns + expect_true( + ncol(merra2_covariate) == 4 + ) + # expect numeric value + expect_true( + class(merra2_covariate[, 4]) == "numeric" + ) + } else { + # expect 3 columns + expect_true( + ncol(merra2_covariate) == 3 + ) + # expect numeric value + expect_true( + class(merra2_covariate[, 3]) == "numeric" + ) + } + # expect $time is class Date + expect_true( + "POSIXt" %in% class(merra2_covariate$time) + ) 
+ } + } + # with included geometry + testthat::expect_no_error( + merra2_covariate_geom <- calc_merra2( + from = merra2, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(merra2_covariate_geom), 4 + ) + testthat::expect_true( + "SpatVector" %in% class(merra2_covariate_geom) + ) +}) diff --git a/tests/testthat/test-modis.R b/tests/testthat/test-modis.R new file mode 100644 index 00000000..4b8f1537 --- /dev/null +++ b/tests/testthat/test-modis.R @@ -0,0 +1,942 @@ +################################################################################ +##### unit and integration tests for NASA MODIS functions +# nolint start + +################################################################################ +##### download_modis +testthat::test_that("download_modis (MODIS-MOD09GA)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + years <- 2020 + product <- "MOD09GA" + version <- "61" + horizontal_tiles <- c(12, 13) + vertical_tiles <- c(5, 6) + nasa_earth_data_token <- "tOkEnPlAcEhOlDeR" + directory_to_save <- paste0(tempdir(), "/mod/") + for (y in seq_along(years)) { + date_start <- paste0(years[y], "-06-20") + date_end <- paste0(years[y], "-06-24") + # run download function + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = version, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + # define file path with commands + commands_path <- paste0( + directory_to_save, + product, + "_", + date_start, + "_", + date_end, + "_wget_commands.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path)[, 2] + # extract urls + urls <- extract_urls(commands = commands, position = 4) + # check HTTP URL status 
+ url_status <- check_urls(urls = urls, size = 3L, method = "SKIP") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + } + unlink(directory_to_save, recursive = TRUE) +}) + + +testthat::test_that("download_modis (MODIS-MOD06L2)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + product <- "MOD06_L2" + version <- "61" + date_start <- "2019-02-18" + date_end <- "2019-02-18" + nasa_earth_data_token <- "tOkEnPlAcEhOlDeR" + horizontal_tiles <- c(8, 10) + vertical_tiles <- c(4, 5) + directory_to_save <- paste0(tempdir(), "/mod/") + + testthat::expect_error( + kax <- download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = version, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + mod06_links = NULL, + remove_command = FALSE) + ) + # link check + tdir <- tempdir() + faux_urls <- + rbind( + c(4387858920, + paste0( + "/archive/allData/61/MOD06_L2/2019/049/", + "MOD06_L2.A2019049.0720.061.2019049194350.hdf" + ), + 28267915) + ) + + faux_urls <- data.frame(faux_urls) + mod06_scenes <- paste0(tdir, "/mod06_example.csv") + write.csv(faux_urls, mod06_scenes, row.names = FALSE) + + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = version, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + mod06_links = mod06_scenes, + remove_command = FALSE) + + # define file path with commands + commands_path <- paste0( + directory_to_save, + product, + "_", + 
date_start, + "_", + date_end, + "_wget_commands.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path)[, 2] + # extract urls + urls <- extract_urls(commands = commands, position = 4) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "SKIP") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + + +testthat::test_that("download_modis (expected errors)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + years <- 2020 + product <- c("MOD09GA", "MOD11A1", "MOD13A2", "MCD19A2") + product <- sample(product, 1L) + version <- "61" + horizontal_tiles <- c(12, 13) + vertical_tiles <- c(5, 6) + nasa_earth_data_token <- "tOkEnPlAcEhOlDeR" + directory_to_save <- paste0(tempdir(), "/mod/") + date_start <- paste0(years, "-06-25") + date_end <- paste0(years, "-06-28") + + # no token + testthat::expect_no_error( + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = version, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + ) + + # no token + testthat::expect_error( + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = version, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = NULL, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + ) + + # year difference between date_start and date_end + testthat::expect_error( + download_data(dataset_name = "modis", + date 
= c(date_start, "2024-03-28"), + product = "MOD11A1", + version = version, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + ) + + # null version + testthat::expect_error( + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = NULL, + horizontal_tiles = horizontal_tiles, + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + ) + + # invalid tile range (horizontal) + testthat::expect_error( + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = "61", + horizontal_tiles = c(-13, -3), + vertical_tiles = vertical_tiles, + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + ) + + # invalid tile range (horizontal) + testthat::expect_error( + download_data(dataset_name = "modis", + date = c(date_start, date_end), + product = product, + version = "61", + horizontal_tiles = horizontal_tiles, + vertical_tiles = c(100, 102), + nasa_earth_data_token = nasa_earth_data_token, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + ) + + # define file path with commands + commands_path <- paste0( + directory_to_save, + product, + "_", + date_start, + "_", + date_end, + "_wget_commands.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path)[, 2] + # extract urls + urls <- extract_urls(commands = commands, position = 4) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 2L, method = "SKIP") + # implement unit tests + 
test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_modis* +testthat::test_that("process_modis_sds", { + # main test + txt_products <- c("MOD11A1", "MOD13A2", "MOD09GA", "MCD19A2") + txt_exp_output <- + c( + MOD11A1 = "(LST_)", + MOD13A2 = "(NDVI)", + MOD09GA = "(sur_refl_b0)", + MCD19A2 = "(Optical_Depth)" + ) + txt_exp_output <- unname(txt_exp_output) + # expect + testthat::expect_message( + mcdtest <- process_modis_sds("MCD19A2") + ) + testthat::expect_equal( + mcdtest, "(Optical_Depth)" + ) + testthat::expect_no_error( + process_modis_sds("MCD19A2", "(cos|RelAZ|Angle)") + ) + for (i in 1:3) { + testthat::expect_equal( + process_modis_sds(txt_products[i]), txt_exp_output[i] + ) + } + testthat::expect_no_error( + filt_other <- process_modis_sds("ignored", "(cos)") + ) + testthat::expect_equal(filt_other, "(cos)") + +}) + + +testthat::test_that("process_flatten_sds", { + withr::local_package("terra") + withr::local_package("stars") + withr::local_options(list(sf_use_s2 = FALSE)) + + mcd19 <- testthat::test_path( + "..", "testdata", "modis", "MCD19A2.A2021227.h11v05.061.2023149160635.hdf" + ) + mod09 <- testthat::test_path( + "..", "testdata", "modis", "MOD09GA.A2021227.h11v05.061.2021229035936.hdf" + ) + + # main test: mcd19 + testthat::expect_no_error( + mcdaggr <- + process_flatten_sds( + path = mcd19, + subdataset = "Optical_Depth", + fun_agg = "mean" + ) + ) + testthat::expect_s4_class(mcdaggr, "SpatRaster") + testthat::expect_equal(terra::nlyr(mcdaggr), 2L) + testthat::expect_equal( + all(grepl("^Optical", names(mcdaggr))), + TRUE + ) + + # flatten error + path_mod06 <- + testthat::test_path( + "..", "testdata", "modis", + "MOD06_L2.A2021227.0320.061.2021227134022.hdf" + ) + 
+ testthat::expect_error( + process_flatten_sds( + path = path_mod06, + subdataset = "(Fraction)", + fun_agg = "mean" + ) + ) + + # mod09 test + mod09_sub <- + sprintf("HDF4_EOS:EOS_GRID:%s:MODIS_Grid_500m_2D:sur_refl_b01_1", mod09) + # main test: mcd19 + testthat::expect_no_error( + modaggr <- + process_flatten_sds( + path = mod09_sub, + subdataset = NULL, + fun_agg = "mean" + ) + ) + testthat::expect_s4_class(modaggr, "SpatRaster") + testthat::expect_equal(terra::nlyr(modaggr), 1L) + testthat::expect_true(grepl("^500m Surface", names(modaggr))) +}) + + +testthat::test_that("process_modis_merge", { + withr::local_package("terra") + withr::local_package("stars") + withr::local_options(list(sf_use_s2 = FALSE)) + + path_mod11 <- + testthat::test_path( + "../testdata/modis/", + "MOD11A1.A2021227.h11v05.061.2021228105320.hdf" + ) + testthat::expect_no_error( + process_modis_merge( + path = path_mod11, + date = "2021-08-15", + subdataset = "(LST_)" + ) + ) + # case 2: standard mod13a2 + path_mod13 <- + testthat::test_path( + "../testdata/modis/", + "MOD13A2.A2021225.h11v05.061.2021320163751.hdf" + ) + testthat::expect_no_error( + process_modis_merge( + path = path_mod13, + date = "2021-08-13", + subdataset = "(NDVI)" + ) + ) + + # case 3: standard mcd19a2 + path_mcd19 <- + testthat::test_path( + "../testdata/modis/", + "MCD19A2.A2021227.h11v05.061.2023149160635.hdf" + ) + testthat::expect_no_error( + process_modis_merge( + path = path_mcd19, + date = "2021-08-15", + subdataset = "(Optical_Depth)" + ) + ) + + # case 3: standard mcd19a2 + path_mod09 <- + testthat::test_path( + "../testdata/modis/", + "MOD09GA.A2021227.h11v05.061.2021229035936.hdf" + ) + testthat::expect_no_error( + process_modis_merge( + path = path_mod09, + date = "2021-08-15", + subdataset = "(sur_refl_b0)" + ) + ) + + # multiple files + paths_mod13 <- list.files( + testthat::test_path("../testdata/modis/"), + pattern = "MOD13A2", + full.names = TRUE + ) + testthat::expect_no_error( + 
process_modis_merge( + path = paths_mod13, + date = "2021-08-13", + subdataset = "(NDVI)" + ) + ) + testthat::expect_error( + process_modis_merge( + path = paths_mod13, + date = "2021-08-13", + subdataset = "(NDVI)", + fun_agg = 3L + ) + ) + +}) + + +testthat::test_that("process_blackmarble*", { + withr::local_package("terra") + + path_vnp46 <- + list.files( + testthat::test_path("..", "testdata", "modis"), + "^VNP46A2", + full.names = TRUE + ) + + testthat::expect_no_error( + corn <- process_blackmarble_corners() + ) + testthat::expect_error( + process_blackmarble_corners(hrange = c(99, 104)) + ) + + testthat::expect_warning( + vnp46_proc <- process_blackmarble( + path = path_vnp46[1], + tile_df = corn, + date = "2018-08-13" + ) + ) + testthat::expect_s4_class(vnp46_proc, "SpatRaster") + testthat::expect_equal(terra::nlyr(vnp46_proc), 1L) + + testthat::expect_warning( + vnp46_proc2 <- process_blackmarble( + path = path_vnp46[1], + tile_df = corn, + subdataset = c(3L, 5L), + date = "2018-08-13" + ) + ) + + testthat::expect_s4_class(vnp46_proc2, "SpatRaster") + testthat::expect_equal(terra::nlyr(vnp46_proc2), 2L) + + testthat::expect_error( + process_blackmarble( + path = path_vnp46[1], + tile_df = corn, + date = "2018~08~13" + ) + ) + +}) + + +testthat::test_that("process_modis_warp + process_modis_swath", { + withr::local_package("stars") + withr::local_package("terra") + withr::local_options(list(sf_use_s2 = FALSE)) + + path_mod06 <- + testthat::test_path( + "..", "testdata", "modis", + "MOD06_L2.A2021227.0320.061.2021227134022.hdf" + ) + path_mod06 <- + sprintf("HDF4_EOS:EOS_SWATH:%s:mod06:Cloud_Fraction_Night", path_mod06) + # internal warning from stars + testthat::expect_warning( + warped <- process_modis_warp( + path = path_mod06 + ) + ) + testthat::expect_s3_class(warped, "stars") + testthat::expect_equal( + unname(stars::st_res(warped)[1]), 0.1, tolerance = 1e-6 + ) + + path_mod06s <- + list.files( + testthat::test_path("..", "testdata", "modis"), + 
pattern = "MOD06_L2", + full.names = TRUE + ) + + testthat::expect_warning( + warped4 <- process_modis_swath( + path = path_mod06s, + date = "2021-08-15", + subdataset = c("Cloud_Fraction_Night", "Cloud_Fraction_Day") + ) + ) + testthat::expect_s4_class(warped4, "SpatRaster") + + +}) + + +testthat::test_that("process_modis (expected errors)", { + withr::local_package("terra") + withr::local_package("stars") + withr::local_options(list(sf_use_s2 = FALSE)) + path_mod06 <- + testthat::test_path( + "..", "testdata", "modis", + "MOD06_L2.A2021227.0320.061.2021227134022.hdf" + ) + path_mod06e <- + sprintf("HDF4_EOS:EOS_SWATH:%s:mod06:Cloud_Fraction_Night", path_mod06) + + testthat::expect_no_error( + suppressWarnings( + process_modis_swath( + path = path_mod06, + subdataset = "Cloud_Fraction_Night", + date = "2021-08-15" + ) + ) + ) + testthat::expect_error( + process_modis_swath( + path = path_mod06, + subdataset = "Cloud_Fraction_Night", + date = "2021~08~15" + ) + ) + testthat::expect_error( + process_modis_swath( + path = path_mod06, + subdataset = "Cloud_Fraction_Night", + date = "2021-13-15" + ) + ) + testthat::expect_error( + process_modis_swath( + path = path_mod06, + subdataset = "Cloud_Fraction_Night", + date = "2021-12-45" + ) + ) +}) + +################################################################################ +##### calc_modis* +testthat::test_that("calc_modis_par", { + withr::local_package("sf") + withr::local_package("terra") + withr::local_package("stars") + withr::local_package("lwgeom") + withr::local_options( + list( + sf_use_s2 = FALSE, + future.resolve.recursive = 2L + ) + ) + + site_faux <- + data.frame( + site_id = "37999904288101", + lon = -78.87, + lat = 35.8734, + time = as.Date("2021-08-15") + ) + site_faux <- + terra::vect( + site_faux, + geom = c("lon", "lat"), + keepgeom = FALSE, + crs = "EPSG:4326") + + # case 1: standard mod11a1 + path_mod11 <- + testthat::test_path( + "../testdata/modis/", + 
"MOD11A1.A2021227.h11v05.061.2021228105320.hdf" + ) + testthat::expect_no_error( + base_mod11 <- + process_modis_merge( + path = path_mod11, + date = "2021-08-15", + subdataset = "(LST_)", + fun_agg = "mean" + ) + ) + testthat::expect_s4_class(base_mod11, "SpatRaster") + + testthat::expect_no_error( + suppressWarnings( + calc_mod11 <- + calc_modis_par( + from = path_mod11, + locs = sf::st_as_sf(site_faux), + preprocess = process_modis_merge, + name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), + subdataset = "(LST_)", + nthreads = 1L + ) + ) + ) + testthat::expect_s3_class(calc_mod11, "data.frame") + + # ... _add arguments test + aux <- 0L + testthat::expect_no_error( + suppressWarnings( + calc_mod11 <- + calc_modis_par( + from = path_mod11, + locs = sf::st_as_sf(site_faux), + preprocess = process_modis_merge, + package_list_add = c("MASS"), + export_list_add = c("aux"), + name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), + subdataset = "(LST_)", + nthreads = 1L + ) + ) + ) + + # with geometry + testthat::expect_no_error( + suppressWarnings( + calc_mod11_geom <- + calc_modis_par( + from = path_mod11, + locs = sf::st_as_sf(site_faux), + preprocess = process_modis_merge, + package_list_add = c("MASS"), + export_list_add = c("aux"), + name_covariates = c("MOD_LSTNT_0_", "MOD_LSTDY_0_"), + subdataset = "(LST_)", + nthreads = 1L, + geom = TRUE + ) + ) + ) + testthat::expect_s4_class(calc_mod11_geom, "SpatVector") + + # case 2: swath mod06l2 + path_mod06 <- + list.files( + testthat::test_path("..", "testdata/modis"), + "MOD06", + full.names = TRUE + ) + testthat::expect_no_error( + suppressWarnings( + cloud0 <- process_modis_swath( + path = path_mod06, + subdataset = c("Cloud_Fraction_Day"), + date = "2021-08-15" + ) + ) + ) + + testthat::expect_no_error( + suppressWarnings( + calc_mod06 <- + calc_modis_par( + from = path_mod06, + locs = site_faux, + subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), + preprocess = process_modis_swath, + name_covariates = 
c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), + nthreads = 1 + ) + ) + ) + testthat::expect_s3_class(calc_mod06, "data.frame") + + # with geometry + testthat::expect_no_error( + suppressWarnings( + calc_mod06_geom <- + calc_modis_par( + from = path_mod06, + locs = site_faux, + subdataset = c("Cloud_Fraction_Day", "Cloud_Fraction_Night"), + preprocess = process_modis_swath, + name_covariates = c("MOD_CLFRN_0_", "MOD_CLFRD_0_"), + nthreads = 1, + geom = TRUE + ) + ) + ) + testthat::expect_s4_class(calc_mod06_geom, "SpatVector") + + # case 3: VIIRS + path_vnp46 <- + list.files( + testthat::test_path("..", "testdata/modis"), + "VNP46", + full.names = TRUE + ) + testthat::expect_warning( + base_vnp <- process_blackmarble( + path = path_vnp46, + date = "2018-08-13", + tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)) + ) + ) + + testthat::expect_no_error( + suppressWarnings( + calc_vnp46 <- + calc_modis_par( + from = path_vnp46, + locs = site_faux, + preprocess = process_blackmarble, + name_covariates = c("MOD_NITLT_0_"), + subdataset = 3L, + nthreads = 1, + tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)) + ) + ) + ) + testthat::expect_s3_class(calc_vnp46, "data.frame") + + # with geometry (as SpatVector) + testthat::expect_no_error( + suppressWarnings( + calc_vnp46_geom_v <- + calc_modis_par( + from = path_vnp46, + locs = site_faux, + preprocess = process_blackmarble, + name_covariates = c("MOD_NITLT_0_"), + subdataset = 3L, + nthreads = 1, + tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), + geom = TRUE + ) + ) + ) + testthat::expect_s4_class(calc_vnp46_geom_v, "SpatVector") + + + # with geometry (as sf) + testthat::expect_no_error( + suppressWarnings( + calc_vnp46_geom_sf <- + calc_modis_par( + from = path_vnp46, + locs = sf::st_as_sf(site_faux), + preprocess = process_blackmarble, + name_covariates = c("MOD_NITLT_0_"), + subdataset = 3L, + nthreads = 1, + tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)), + geom = TRUE + ) + ) + ) + 
testthat::expect_s4_class(calc_vnp46_geom_sf, "SpatVector") + + # error cases + testthat::expect_error( + process_modis_merge(path = site_faux) + ) + testthat::expect_error( + process_modis_merge( + path = path_mod11, + date = "2021-08-15", + fun_agg = 3L + ) + ) + testthat::expect_error( + process_modis_merge( + path = path_mod11, + date = "2021~08~15", + fun_agg = "mean" + ) + ) + + site_faux_r <- site_faux + names(site_faux_r)[1] <- "ID" + testthat::expect_error( + calc_modis_daily( + from = rast(nrow = 3, ncol = 3), + date = "2021-08-15", + locs = site_faux_r + ) + ) + testthat::expect_error( + calc_modis_daily( + from = rast(nrow = 3, ncol = 3), + date = "2021-08-15", + locs = matrix(c(1, 3, 4, 5), nrow = 2) + ) + ) + testthat::expect_error( + calc_modis_daily( + from = rast(nrow = 3, ncol = 3), + date = "2021-08-15", + locs = sf::st_as_sf(site_faux) + ) + ) + testthat::expect_error( + calc_modis_daily( + from = terra::rast(nrow = 3, ncol = 3, vals = 1:9, names = "a"), + date = "2021-08-15", + locs = array(1:12, dim = c(2, 2, 3)) + ) + ) + site_faux0 <- site_faux + names(site_faux0)[2] <- "date" + testthat::expect_error( + calc_modis_daily( + from = rast(nrow = 3, ncol = 3), + date = "2021-08-15", + locs = sf::st_as_sf(site_faux0) + ) + ) + site_faux2 <- site_faux + #site_faux2[, 4] <- NULL + + path_mcd19 <- + testthat::test_path( + "../testdata/modis/", + "MCD19A2.A2021227.h11v05.061.2023149160635.hdf" + ) + mcd_merge <- + process_modis_merge( + path = path_mcd19, + date = "2021-08-15", + subdataset = "(Optical_Depth)" + ) + + testthat::expect_no_error( + calc_modis_daily( + from = mcd_merge, + date = "2021-08-15", + locs = sf::st_as_sf(site_faux2), + radius = 1000, + name_extracted = "MCD_EXTR_1K_" + ) + ) + + # test calc_modis_daily directly with geometry + testthat::expect_no_error( + calc_mod_geom <- calc_modis_daily( + from = mcd_merge, + date = "2021-08-15", + locs = sf::st_as_sf(site_faux2), + radius = 1000, + name_extracted = "MCD_EXTR_1K_", + geom = 
TRUE + ) + ) + testthat::expect_s4_class(calc_mod_geom, "SpatVector") + + testthat::expect_error( + calc_modis_par(from = site_faux) + ) + testthat::expect_error( + calc_modis_par(from = path_mod11, product = "MOD11A1", locs = list(1, 2, 3)) + ) + testthat::expect_error( + calc_modis_par( + from = path_vnp46, + locs = site_faux, + preprocess = "fountain", + name_covariates = c("MOD_NITLT_0_", "MOD_K1_"), + subdataset = 3L, + nthreads = 1 + ) + ) + testthat::expect_warning( + calc_modis_par( + from = path_vnp46, + locs = site_faux, + preprocess = process_blackmarble, + name_covariates = c("MOD_NITLT_0_", "MOD_K1_"), + subdataset = 3L, + nthreads = 2, + tile_df = process_blackmarble_corners(c(9, 10), c(5, 5)) + ) + ) + testthat::expect_warning( + flushed <- calc_modis_par( + from = path_vnp46, + locs = site_faux, + name_covariates = c("MOD_NITLT_0_"), + preprocess = process_blackmarble, + subdataset = 3L, + nthreads = 1, + radius = c(-1000, 0L) + ) + ) + testthat::expect_s3_class(flushed, "data.frame") + testthat::expect_true(unlist(flushed[, 2]) == -99999) + +}) +# nolint end diff --git a/tests/testthat/test-narr.R b/tests/testthat/test-narr.R new file mode 100644 index 00000000..a6919cf2 --- /dev/null +++ b/tests/testthat/test-narr.R @@ -0,0 +1,226 @@ +################################################################################ +##### unit and integration tests for NOAA NARR functions + +##### download_narr +testthat::test_that("download_narr (no errors)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + year_start <- 2018 + year_end <- 2018 + variables <- c( + "weasd", # monolevel + "omega", # pressure level + "soill" # subsurface + ) + directory_to_save <- paste0(tempdir(), "/narr/") + # run download function + download_data(dataset_name = "narr", + year = c(year_start, year_end), + variables = variables, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE) + # define path with 
commands + commands_path <- paste0(directory_to_save, + "narr_", + year_start, "_", year_end, + "_curl_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 6) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 5L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_narr (expected errors)", { + testthat::expect_error( + download_data( + dataset_name = "narr", + variables = "weasd", + year = c(10, 11), + acknowledgement = TRUE, + directory_to_save = testthat::test_path("..", "testdata/", "") + ) + ) +}) + +testthat::test_that("narr_variable (expected errors)", { + # expected error due to unrecognized variable name + testthat::expect_error( + narr_variable("uNrEcOgNiZed") + ) +}) + +##### process_narr +testthat::test_that("process_narr", { + withr::local_package("terra") + variables <- c( + "weasd", + "omega" + ) + # expect function + expect_true( + is.function(process_narr) + ) + for (v in seq_along(variables)) { + narr <- + process_narr( + date = c("2018-01-01", "2018-01-01"), + variable = variables[v], + path = + testthat::test_path( + "..", + "testdata", + "narr", + variables[v] + ) + ) + # expect output is SpatRaster + expect_true( + class(narr)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(narr) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(narr)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(narr)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(narr)) + ) + # expect dimensions according to levels + if 
(variables[v] == "weasd") {
+      expect_true(
+        dim(narr)[3] == 1
+      )
+    } else if (variables[v] == "omega") {
+      expect_true(
+        dim(narr)[3] == 29
+      )
+    }
+  }
+  # test with cropping extent
+  testthat::expect_no_error(
+    narr_ext <-
+      process_narr(
+        date = c("2018-01-01", "2018-01-01"),
+        variable = "omega",
+        path =
+          testthat::test_path(
+            "..",
+            "testdata",
+            "narr",
+            "omega"
+          ),
+        extent = terra::ext(narr)
+      )
+  )
+})
+
+##### calc_narr
+testthat::test_that("calc_narr", {
+  withr::local_package("terra")
+  variables <- c(
+    "weasd",
+    "omega"
+  )
+  radii <- c(0, 1000)
+  ncp <- data.frame(lon = -78.8277, lat = 35.95013)
+  ncp$site_id <- "3799900018810101"
+  # expect function
+  expect_true(
+    is.function(calc_narr)
+  )
+  for (v in seq_along(variables)) {
+    variable <- variables[v]
+    for (r in seq_along(radii)) {
+      narr <-
+        process_narr(
+          date = c("2018-01-01", "2018-01-01"),
+          variable = variable,
+          path =
+            testthat::test_path(
+              "..",
+              "testdata",
+              "narr",
+              variable
+            )
+        )
+      narr_covariate <-
+        calc_narr(
+          from = narr,
+          locs = ncp,
+          locs_id = "site_id",
+          radius = radii[r],
+          fun = "mean"
+        )
+      # set column names
+      narr_covariate <- calc_setcolumns(
+        from = narr_covariate,
+        lag = 0,
+        dataset = "narr",
+        locs_id = "site_id"
+      )
+      # expect output is data.frame
+      expect_true(
+        class(narr_covariate) == "data.frame"
+      )
+      if (variable == "weasd") {
+        # expect 3 columns (no pressure level)
+        expect_true(
+          ncol(narr_covariate) == 3
+        )
+        # expect numeric value
+        expect_true(
+          class(narr_covariate[, 3]) == "numeric"
+        )
+      } else {
+        # expect 4 columns
+        expect_true(
+          ncol(narr_covariate) == 4
+        )
+        # expect numeric value
+        expect_true(
+          class(narr_covariate[, 4]) == "numeric"
+        )
+      }
+      # expect $time is class POSIXct
+      expect_true(
+        "POSIXct" %in% class(narr_covariate$time)
+      )
+    }
+  }
+  # with geometry
+  testthat::expect_no_error(
+    narr_covariate_geom <- calc_narr(
+      from = narr,
+      locs = ncp,
+      locs_id = "site_id",
+      radius = 0,
+      fun = "mean",
+      geom = 
TRUE + ) + ) + testthat::expect_equal( + ncol(narr_covariate_geom), 4 # 4 columns because omega has pressure levels + ) + testthat::expect_true( + "SpatVector" %in% class(narr_covariate_geom) + ) +}) diff --git a/tests/testthat/test-nei.R b/tests/testthat/test-nei.R new file mode 100644 index 00000000..335c5d6c --- /dev/null +++ b/tests/testthat/test-nei.R @@ -0,0 +1,263 @@ +################################################################################ +##### unit and integration tests for U.S. EPA NEI functions +# nolint start + +################################################################################ +##### download_nei +testthat::test_that("download_nei", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + directory_to_save <- paste0(tempdir(), "/nei/") + certificate <- system.file("extdata/cacert_gaftp_epa.pem", + package = "amadeus") + # run download function + year <- c(2017L, 2020L) + download_data(dataset_name = "nei", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + year = year, + remove_command = FALSE, + epa_certificate_path = certificate + ) + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # define file path with commands + commands_path <- paste0( + download_sanitize_path(directory_to_save), + "NEI_AADT_", + paste(year, collapse = "-"), + "_", + Sys.Date(), + "_wget_commands.txt" + ) + + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 3) + # check HTTP URL status + url_status <- + httr::HEAD(urls[1], config = httr::config(cainfo = certificate)) + url_status <- url_status$status_code + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after 
test + file.remove(commands_path) + # remove temporary nei + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_nei (live)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + directory_to_save <- paste0(tempdir(), "/nei/") + certificate <- system.file("extdata/cacert_gaftp_epa.pem", + package = "amadeus") + # run download function + year <- c(2017L, 2020L) + testthat::expect_no_error( + download_data(dataset_name = "nei", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = TRUE, + year = year, + remove_command = FALSE, + epa_certificate_path = certificate, + unzip = TRUE + ) + ) + testthat::expect_equal( + length(list.files(paste0(directory_to_save, "/zip_files"))), 2 + ) + testthat::expect_equal( + length(list.files( + paste0(directory_to_save, "/data_files"), + recursive = TRUE) + ), 12 + ) + # remove temporary nei + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_nei (expected errors)", { + # expected errors due to invalid certificate + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + tdir <- tempdir() + directory_to_save <- paste0(tempdir(), "/epa/") + certificate <- file.path(tdir, "cacert_gaftp_epa.pem") + # remove if there is a preexisting file + if (file.exists(certificate)) { + file.remove(certificate) + file.remove(gsub("pem", "crt", certificate)) + } + + # run download function + year <- c(2017L) + testthat::expect_message( + download_data(dataset_name = "nei", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + year = year, + remove_command = FALSE, + epa_certificate_path = certificate + ) + ) + # define file path with commands + commands_path <- paste0( + directory_to_save, + "NEI_AADT_", + paste(year, collapse = "-"), + "_", + Sys.Date(), + "_wget_commands.txt" + ) + # remove file with commands after test + 
testthat::expect_true(file.exists(commands_path)) + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_nei +testthat::test_that("process_nei", { + withr::local_package("terra") + + path_nei <- testthat::test_path("../testdata", "nei", "") + path_cnty <- system.file("gpkg/nc.gpkg", package = "sf") + path_cnty <- terra::vect(path_cnty) + path_cnty$GEOID <- path_cnty$FIPS + + testthat::expect_no_error( + neinc <- process_nei(path = path_nei, year = 2017, county = path_cnty) + ) + testthat::expect_s4_class(neinc, "SpatVector") + + # error cases + testthat::expect_error( + process_nei(testthat::test_path("../testdata", "modis"), year = 2017) + ) + testthat::expect_error( + process_nei(path_nei, year = 2030, county = path_cnty) + ) + testthat::expect_error( + process_nei(path_nei, year = 2020, county = NULL) + ) + testthat::expect_error( + process_nei(path_nei, year = 2020, county = array(1, 2)) + ) + names(path_cnty)[which(names(path_cnty) == "GEOID")] <- "COUNTYID" + testthat::expect_error( + process_nei(path_nei, year = 2020, county = path_cnty) + ) + testthat::expect_error( + process_nei("./EmPtY/pAtH", year = 2020, county = path_cnty) + ) +}) + +################################################################################ +##### calc_nei +testthat::test_that("calc_nei", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_package("data.table") + withr::local_options(list(sf_use_s2 = FALSE)) + withr::local_seed(202401) + + ncpath <- system.file("gpkg/nc.gpkg", package = "sf") + nc <- terra::vect(ncpath) + nc <- nc[grep("(Orange|Wake|Durham)", nc$NAME), ] + + neipath <- testthat::test_path("..", "testdata", "nei") + + testthat::expect_error( + neiras <- process_nei( + path = neipath, + county = nc, + year = 2017 + ) + ) + + nc$GEOID <- nc$FIPS + testthat::expect_no_error( + neiras <- process_nei( + path = neipath, + 
county = nc, + year = 2017 + ) + ) + # inspecting calculated results + testthat::expect_true(inherits(neiras, "SpatVector")) + testthat::expect_true(nrow(neiras) == 3) + + # sf case + testthat::expect_no_error( + neires <- process_nei( + path = neipath, + county = sf::st_as_sf(nc), + year = 2017 + ) + ) + testthat::expect_true(inherits(neires, "SpatVector")) + testthat::expect_true(nrow(neires) == 3) + + # error cases + testthat::expect_error( + process_nei(neipath, year = 2017) + ) + testthat::expect_error( + process_nei(neipath, "Orion/Betelgeuse", year = 2017) + ) + testthat::expect_error( + process_nei(neipath, nc, year = 2083) + ) + + # calc_nei + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + ncp$time <- 2018L + ncp <- terra::vect(ncp, keepgeom = TRUE, crs = "EPSG:4326") + nc <- terra::project(nc, "EPSG:4326") + + testthat::expect_no_error( + neicalced <- calc_nei( + locs = ncp, + from = neiras + ) + ) + testthat::expect_true(any(grepl("NEI", names(neicalced)))) + testthat::expect_equal(neicalced$TRF_NEINP_0_00000, 1579079, tolerance = 1) + + # with geometry + testthat::expect_no_error( + neicalced_geom <- calc_nei( + locs = ncp, + from = neiras, + geom = TRUE + ) + ) + testthat::expect_s4_class(neicalced_geom, "SpatVector") + + # more error cases + testthat::expect_condition( + calc_nei( + locs = "jittered", + from = neiras + ) + ) + +}) +# nolint end diff --git a/tests/testthat/test-nlcd.R b/tests/testthat/test-nlcd.R new file mode 100644 index 00000000..1e61989b --- /dev/null +++ b/tests/testthat/test-nlcd.R @@ -0,0 +1,295 @@ +################################################################################ +##### unit and integration tests for MLCR NLCD functions + +################################################################################ +##### download_nlcd +testthat::test_that("download_nlcd", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + years <- c(2021, 
2019, 2016) + collections <- c(rep("Coterminous United States", 2), "Alaska") + collection_codes <- c(rep("l48", 2), "ak") + directory_to_save <- paste0(tempdir(), "/nlcd/") + # run download function + for (y in seq_along(years)) { + download_data(dataset_name = "nlcd", + year = years[y], + collection = collections[y], + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE, + unzip = FALSE, + remove_zip = FALSE) + # define file path with commands + commands_path <- paste0(download_sanitize_path(directory_to_save), + "nlcd_", + years[y], + "_land_cover_", + collection_codes[y], + "_", + Sys.Date(), + "_curl_command.txt") + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 5) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + } + testthat::expect_error( + download_data(dataset_name = "nlcd", + year = 2000, + collection = "Coterminous United States", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = TRUE, + unzip = FALSE, + remove_zip = FALSE) + ) + # remove temporary nlcd + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_nlcd +testthat::test_that("process_nlcd", { + withr::local_package("terra") + + path_nlcd19 <- + testthat::test_path( + "..", + "testdata" + ) + + testthat::expect_no_error( + nlcd19 <- process_nlcd(path = path_nlcd19, year = 2019) + ) + # 
test with extent cropping + testthat::expect_no_error( + nlcd19_ext <- process_nlcd( + path = path_nlcd19, + year = 2019, + extent = terra::ext(-1580000, -1520000, 1920000, 1980000) + ) + ) + testthat::expect_s4_class(nlcd19, "SpatRaster") + testthat::expect_equal(unname(terra::metags(nlcd19, name = "year")), "2019") + + # error cases + testthat::expect_error( + process_nlcd(path = 1L) + ) + testthat::expect_error( + process_nlcd(path = "/universe/galaxy/solarsys/earth/usa.nc") + ) + testthat::expect_error( + process_nlcd(path_nlcd19, "nineteen eighty-four") + ) + testthat::expect_error( + process_nlcd(path_nlcd19, year = 2020) + ) + # make duplicate with tif and img + tdir <- tempdir() + dir.create(paste0(tdir, "/nlcd_all")) + file.create(paste0(tdir, "/nlcd_all/nlcd_2019_land_cover_20240624.tif")) + file.create(paste0(tdir, "/nlcd_all/nlcd_2019_land_cover_20240624.img")) + testthat::expect_error( + process_nlcd(path = paste0(tdir, "/nlcd_all"), year = 2019) + ) + +}) + +################################################################################ +##### calc_nlcd +testthat::test_that("calc_nlcd", { + withr::local_package("terra") + withr::local_package("exactextractr") + withr::local_package("sf") + withr::local_package("future") + withr::local_package("future.apply") + withr::local_options( + list(sf_use_s2 = FALSE, future.resolve.recursive = 2L) + ) + + point_us1 <- cbind(lon = -114.7, lat = 38.9, site_id = 1) + point_us2 <- cbind(lon = -114, lat = 39, site_id = 2) + point_ak <- cbind(lon = -155.997, lat = 69.3884, site_id = 3) # alaska + point_fr <- cbind(lon = 2.957, lat = 43.976, site_id = 4) # france + eg_data <- rbind(point_us1, point_us2, point_ak, point_fr) |> + as.data.frame() |> + terra::vect(crs = "EPSG:4326") + + path_testdata <- + testthat::test_path( + "..", + "testdata" + ) + # CHECK INPUT (error message) + # -- buf_radius is numeric + testthat::expect_no_error( + nlcdras <- process_nlcd(path = path_testdata) + ) + 
testthat::expect_s4_class(nlcdras, "SpatRaster") + + testthat::expect_error( + calc_nlcd(locs = eg_data, + from = nlcdras, + radius = "1000"), + "radius is not a numeric." + ) + testthat::expect_error( + calc_nlcd(locs = eg_data, + from = nlcdras, + mode = "whatnot", + radius = 1000) + ) + # -- buf_radius has likely value + testthat::expect_error( + calc_nlcd(locs = eg_data, + from = nlcdras, + radius = -3), + "radius has not a likely value." + ) + + # -- two modes work properly + testthat::expect_no_error( + calc_nlcd(locs = sf::st_as_sf(eg_data), + from = nlcdras, + mode = "exact", + radius = 1000) + ) + testthat::expect_no_error( + calc_nlcd(locs = eg_data, + from = nlcdras, + mode = "terra", + radius = 300) + ) + # -- multicore mode works properly + testthat::expect_no_error( + calc_nlcd(locs = eg_data, + from = nlcdras, + mode = "exact", + radius = 1000, + nthreads = 2L) + ) + testthat::expect_no_error( + calc_nlcd(locs = eg_data, + from = nlcdras, + mode = "terra", + radius = 1000, + nthreads = 2L) + ) + + + # -- year is numeric + testthat::expect_error( + process_nlcd(path = path_testdata, year = "2021"), + "year is not a numeric." + ) + # -- year has likely value + testthat::expect_error( + process_nlcd(path = path_testdata, + year = 2032), + "NLCD data not available for this year." + ) + testthat::expect_error( + process_nlcd(path = path_testdata, + year = 1789), + "NLCD data not available for this year." + ) + testthat::expect_error( + calc_nlcd(locs = 12, + locs_id = "site_id", + from = nlcdras) + ) + testthat::expect_error( + calc_nlcd(locs = eg_data, + from = 12) + ) + # -- nlcd_path is not a character + testthat::expect_error( + process_nlcd(path = 3, + year = 2), + "path is not a character." + ) + # -- nlcd_path does not exist + nice_sentence <- "That's one small step for a man, a giant leap for mankind." + testthat::expect_error( + process_nlcd( + path = nice_sentence), + "path does not exist." 
+  )
+
+  # CHECK OUTPUT
+  year <- 2021
+  buf_radius <- 3000
+  testthat::expect_no_error(
+    calc_nlcd(
+      locs = eg_data,
+      locs_id = "site_id",
+      from = nlcdras,
+      radius = buf_radius
+    )
+  )
+  output <- calc_nlcd(
+    locs = eg_data,
+    locs_id = "site_id",
+    radius = buf_radius,
+    from = nlcdras
+  )
+  # -- returns a data.frame
+  testthat::expect_equal(class(output)[1], "data.frame")
+  # nrow(output) == nrow(input)
+  testthat::expect_equal(nrow(output), 4)
+  # -- initial names are still in the output data.frame
+  testthat::expect_true(all(names(eg_data) %in% names(output)))
+  # -- check the value of some of the points in the US
+  # the value has changed. What affected this behavior?
+  testthat::expect_equal(
+    output$LDU_TEFOR_0_03000[1], 0.8119843, tolerance = 1e-7
+  )
+  testthat::expect_equal(
+    output$LDU_TSHRB_0_03000[2], 0.9630467, tolerance = 1e-7
+  )
+  # -- class fraction rows should sum to 1
+  testthat::expect_equal(
+    unname(rowSums(output[1:2, 3:(ncol(output))])),
+    rep(1, 2),
+    tolerance = 1e-7
+  )
+  # without geometry output will have 15 columns
+  testthat::expect_equal(
+    ncol(output), 15
+  )
+  output_geom <- calc_nlcd(
+    locs = eg_data,
+    locs_id = "site_id",
+    radius = buf_radius,
+    from = nlcdras,
+    geom = TRUE
+  )
+  # with geometry output will have 15 columns
+  testthat::expect_equal(
+    ncol(output_geom), 15
+  )
+  testthat::expect_true(
+    "SpatVector" %in% class(output_geom)
+  )
+})
diff --git a/tests/testthat/test-olm.R b/tests/testthat/test-olm.R
new file mode 100644
index 00000000..59bbba00
--- /dev/null
+++ b/tests/testthat/test-olm.R
@@ -0,0 +1,74 @@
+################################################################################
+##### unit and integration tests for OpenLandMap functions
+
+################################################################################
+##### download_olm
+testthat::test_that("download_olm", {
+  withr::local_package("rstac")
+  links <-
+    readRDS(
+      system.file("extdata", "openlandmap_assets.rds", package = "amadeus")
+    )
+  
product <- "no2_s5p.l3.trop.tmwm" + format <- "p50_p90_2km*.*tif" + directory_to_save <- paste0(tempdir(), "/olm") + acknowledgement <- TRUE + download <- FALSE + + testthat::expect_no_error( + download_olm( + product = product, + format = format, + directory_to_save = directory_to_save, + acknowledgement = acknowledgement, + download = download, + remove_command = FALSE + ) + ) + + commands_path <- paste0( + directory_to_save, + "/OLM_queried_", + product, + "_", + Sys.Date(), + "_wget_commands.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 5) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_olm +testthat::test_that("process_olm", { + withr::local_package("terra") + tmwm <- testthat::test_path("..", "testdata", "openlandmap", + paste0( + "no2_s5p.l3.trop.tmwm.p50_p90_2km_a_20180501_", + "20221130_go_epsg.4326_v20221219_test.tif" + ) + ) + testthat::expect_no_error( + olm <- process_olm(path = tmwm) + ) + testthat::expect_s4_class(olm, "SpatRaster") + testthat::expect_error( + process_olm(path = 1L) + ) + + # test with cropping extent + testthat::expect_no_error( + olm_ext <- process_olm(path = tmwm, extent = terra::ext(olm)) + ) +}) diff --git a/tests/testthat/test-population.R b/tests/testthat/test-population.R new file mode 100644 index 00000000..d8d86548 --- /dev/null +++ b/tests/testthat/test-population.R @@ -0,0 +1,263 @@ +################################################################################ +##### unit and integration tests for 
NASA SEDAC population functions + +################################################################################ +##### download_sedac_population +testthat::test_that("download_sedac_population", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + years <- c("2020", "all") + data_formats <- c("GeoTIFF", "ASCII") + data_resolutions <- cbind(c("30 second"), + c("30_sec")) + directory_to_save <- paste0(tempdir(), "/pop/") + for (f in seq_along(data_formats)) { + data_format <- data_formats[f] + for (y in seq_along(years)) { + year <- years[y] + for (r in seq_len(nrow(data_resolutions))) { + # run download function + download_data(dataset_name = "sedac_population", + year = year, + data_format = data_format, + data_resolution = data_resolutions[r, 1], + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + unzip = FALSE, + remove_zip = FALSE, + remove_command = FALSE) + # expect sub-directories to be created + testthat::expect_true( + length( + list.files( + directory_to_save, include.dirs = TRUE + ) + ) == 3 + ) + # define file path with commands + if (year == "all") { + year <- "totpop" + } else { + year <- year + } + if (year == "totpop" && data_resolutions[r, 2] == "30_sec") { + resolution <- "2pt5_min" + } else { + resolution <- data_resolutions[r, 2] + } + commands_path <- paste0(download_sanitize_path(directory_to_save), + "sedac_population_", + year, + "_", + resolution, + "_", + Sys.Date(), + "_curl_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 11) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "GET") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + # remove 
temporary population + unlink(directory_to_save, recursive = TRUE) + } + } + } +}) + +testthat::test_that("download_sedac_population (coerce data types)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + year <- c("all") + data_formats <- c("GeoTIFF", "ASCII", "netCDF") + data_resolutions <- c("30 second", "2pt5_min") + directory_to_save <- paste0(tempdir(), "/pop/") + for (f in seq_along(data_formats)) { + download_data(dataset_name = "sedac_population", + year = year, + data_format = data_formats[f], + data_resolution = data_resolutions[1], + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + unzip = FALSE, + remove_zip = FALSE, + remove_command = FALSE) + commands_path <- paste0(directory_to_save, + "sedac_population_", + "totpop", + "_", + data_resolutions[2], + "_", + Sys.Date(), + "_curl_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 11) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "GET") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) + } +}) + +################################################################################ +##### process_sedac_population +testthat::test_that("process_sedac_population (no errors)", { + withr::local_package("terra") + paths <- list.files( + testthat::test_path( + "..", + "testdata", + "population" + ), + pattern = ".tif", + full.names = TRUE + ) + # expect function + expect_true( + is.function(process_sedac_population) + ) + for (p in seq_along(paths)) { + pop <- + process_sedac_population( + path = paths[p] + ) + # expect output is a SpatRaster + expect_true( + class(pop)[1] 
== "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(pop) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(pop)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(pop)[1:2]) + ) + } + # test with cropping extent + testthat::expect_no_error( + pop_ext <- process_sedac_population( + paths[1], + extent = terra::ext(pop) + ) + ) +}) + +testthat::test_that("process_sedac_population (expect null)", { + pop <- + process_sedac_population( + testthat::test_path( + "..", + "testdata", + "population", + "pLaCeHoLdEr.nc" + ) + ) + expect_true( + is.null(pop) + ) +}) + +testthat::test_that("process_sedac_codes", { + string <- "2.5 minute" + testthat::expect_no_error( + code <- process_sedac_codes(string) + ) + testthat::expect_equal(code, "2pt5_min") +}) + +################################################################################ +##### calc_sedac_population +testthat::test_that("calc_sedac_population", { + withr::local_package("terra") + withr::local_package("data.table") + paths <- list.files(testthat::test_path( + "..", "testdata", "population" + ), full.names = TRUE) + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_sedac_population) + ) + for (p in seq_along(paths)) { + path <- paths[p] + for (r in seq_along(radii)) { + pop <- + process_sedac_population( + path = paths + ) + pop_covariate <- + calc_sedac_population( + from = pop, + locs = data.table::data.table(ncp), + locs_id = "site_id", + radius = radii[r], + fun = "mean" + ) + # set column names + pop_covariate <- calc_setcolumns( + from = pop_covariate, + lag = 0, + dataset = "pop", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(pop_covariate) == "data.frame" + ) + # expect 4 columns + expect_true( + ncol(pop_covariate) == 3 + ) + # expect numeric value + expect_true( + 
class(pop_covariate[, 3]) == "numeric" + ) + # expect $time is class integer for year + expect_true( + "integer" %in% class(pop_covariate$time) + ) + } + } + # with included geometry + testthat::expect_no_error( + pop_covariate_geom <- calc_sedac_population( + from = pop, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(pop_covariate_geom), 3 + ) + testthat::expect_true( + "SpatVector" %in% class(pop_covariate_geom) + ) +}) diff --git a/tests/testthat/test-prism.R b/tests/testthat/test-prism.R new file mode 100644 index 00000000..018f6337 --- /dev/null +++ b/tests/testthat/test-prism.R @@ -0,0 +1,148 @@ +################################################################################ +##### unit and integration tests for PRISM functions + +################################################################################ +##### download_prism +testthat::test_that("download_prism", { + # Set up test data + time <- seq(201005, 201012, by = 1) + element <- c("ppt", "tmin", "tmax", "tmean", "tdmean", + "vpdmin", "vpdmax") + # in case of multiple test runs + # note that PRISM download for the same data element + # is allowed up to twice a day. 
IP address could be blocked + # if the limit is exceeded + time <- sample(time, 1) + element <- sample(element, 1) + data_type <- "ts" + format <- "nc" + directory_to_save <- paste0(tempdir(), "/prism/") + acknowledgement <- TRUE + download <- FALSE + remove_command <- FALSE + + # Call the function + download_prism( + time = time, + element = element, + data_type = data_type, + format = format, + directory_to_save = directory_to_save, + acknowledgement = acknowledgement, + download = download, + remove_command = remove_command + ) + + testthat::expect_message( + download_prism( + time = time, + element = "ppt", + data_type = "normals", + format = "asc", + directory_to_save = directory_to_save, + acknowledgement = acknowledgement, + download = download, + remove_command = TRUE + ) + ) + + commands_path <- paste0( + directory_to_save, + "PRISM_", + element, + "_", + data_type, + "_", + time, + "_", + Sys.Date(), + "_wget_commands.txt" + ) + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 6) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + + # Set up test data + time <- "202105" + element <- "soltotal" + data_type <- "ts" + format <- "nc" + directory_to_save <- paste0(tempdir(), "/prism/") + acknowledgement <- TRUE + download <- FALSE + remove_command <- FALSE + + # Call the function and expect an error + testthat::expect_error(download_prism( + time = time, + element = element, + data_type = data_type, + format = format, + directory_to_save = directory_to_save, + acknowledgement = acknowledgement, + download = download, + remove_command = remove_command + )) + unlink(directory_to_save, recursive = TRUE) 
+}) + +################################################################################ +##### process_prism +testthat::test_that("process_prism", { + # Set up test data + withr::local_package("terra") + path <- testthat::test_path( + "..", "testdata", "prism", "PRISM_tmin_30yr_normal_4kmD1_0228_bil_test.nc" + ) + path_dir <- testthat::test_path( + "..", "testdata", "prism" + ) + element <- "tmin" + time <- "0228" + + # Call the function + testthat::expect_no_error(result <- process_prism(path, element, time)) + testthat::expect_no_error(result2 <- process_prism(path_dir, element, time)) + + # Check the return type + testthat::expect_true(inherits(result, "SpatRaster")) + testthat::expect_true(inherits(result2, "SpatRaster")) + + # Check the metadata + testthat::expect_equal(unname(terra::metags(result)["time"]), time) + testthat::expect_equal(unname(terra::metags(result)["element"]), element) + + # Set up test data + path_bad <- "/path/to/nonexistent/folder" + element_bad <- "invalid_element" + time_bad <- "invalid_time" + + # Call the function and expect an error + testthat::expect_error(process_prism(NULL, element, time)) + testthat::expect_error( + testthat::expect_warning( + process_prism(path_bad, element, time) + ) + ) + testthat::expect_error(process_prism(path_dir, element_bad, time)) + testthat::expect_error(process_prism(path_dir, element, time_bad)) + + # test with cropping extent + testthat::expect_no_error( + result_ext <- process_prism( + path, + element, + time, + extent = terra::ext(result) + ) + ) +}) diff --git a/tests/testthat/test-process.R b/tests/testthat/test-process.R index cbe1982a..443d782e 100644 --- a/tests/testthat/test-process.R +++ b/tests/testthat/test-process.R @@ -1,5 +1,10 @@ -# test process_covariates #### -testthat::test_that("test generic process_covariates", { +################################################################################ +##### unit and integration tests for process_covariates and auxiliary functions +# 
nolint start + +################################################################################ +##### process_covariates +testthat::test_that("process_covariates", { withr::local_package("terra") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -89,489 +94,9 @@ testthat::test_that("test generic process_covariates", { ) }) -# test MODIS suites #### -testthat::test_that("test MODIS prefilter", { - # main test - txt_products <- c("MOD11A1", "MOD13A2", "MOD09GA", "MCD19A2") - txt_exp_output <- - c( - MOD11A1 = "(LST_)", - MOD13A2 = "(NDVI)", - MOD09GA = "(sur_refl_b0)", - MCD19A2 = "(Optical_Depth)" - ) - txt_exp_output <- unname(txt_exp_output) - # expect - testthat::expect_message( - mcdtest <- process_modis_sds("MCD19A2") - ) - testthat::expect_equal( - mcdtest, "(Optical_Depth)" - ) - testthat::expect_no_error( - process_modis_sds("MCD19A2", "(cos|RelAZ|Angle)") - ) - for (i in 1:3) { - testthat::expect_equal( - process_modis_sds(txt_products[i]), txt_exp_output[i] - ) - } - testthat::expect_no_error( - filt_other <- process_modis_sds("ignored", "(cos)") - ) - testthat::expect_equal(filt_other, "(cos)") - -}) - - -testthat::test_that("process_flatten_sds", { - withr::local_package("terra") - withr::local_package("stars") - withr::local_options(list(sf_use_s2 = FALSE)) - - mcd19 <- testthat::test_path( - "..", "testdata", "modis", "MCD19A2.A2021227.h11v05.061.2023149160635.hdf" - ) - mod09 <- testthat::test_path( - "..", "testdata", "modis", "MOD09GA.A2021227.h11v05.061.2021229035936.hdf" - ) - - # main test: mcd19 - testthat::expect_no_error( - mcdaggr <- - process_flatten_sds( - path = mcd19, - subdataset = "Optical_Depth", - fun_agg = "mean" - ) - ) - testthat::expect_s4_class(mcdaggr, "SpatRaster") - testthat::expect_equal(terra::nlyr(mcdaggr), 2L) - testthat::expect_equal( - all(grepl("^Optical", names(mcdaggr))), - TRUE - ) - - # flatten error - path_mod06 <- - testthat::test_path( - "..", "testdata", "modis", - 
"MOD06_L2.A2021227.0320.061.2021227134022.hdf" - ) - - testthat::expect_error( - process_flatten_sds( - path = path_mod06, - subdataset = "(Fraction)", - fun_agg = "mean" - ) - ) - - # mod09 test - mod09_sub <- - sprintf("HDF4_EOS:EOS_GRID:%s:MODIS_Grid_500m_2D:sur_refl_b01_1", mod09) - # main test: mcd19 - testthat::expect_no_error( - modaggr <- - process_flatten_sds( - path = mod09_sub, - subdataset = NULL, - fun_agg = "mean" - ) - ) - testthat::expect_s4_class(modaggr, "SpatRaster") - testthat::expect_equal(terra::nlyr(modaggr), 1L) - testthat::expect_true(grepl("^500m Surface", names(modaggr))) -}) - - -testthat::test_that("process_modis_merge is good to go", { - withr::local_package("terra") - withr::local_package("stars") - withr::local_options(list(sf_use_s2 = FALSE)) - - path_mod11 <- - testthat::test_path( - "../testdata/modis/", - "MOD11A1.A2021227.h11v05.061.2021228105320.hdf" - ) - testthat::expect_no_error( - process_modis_merge( - path = path_mod11, - date = "2021-08-15", - subdataset = "(LST_)" - ) - ) - # case 2: standard mod13a2 - path_mod13 <- - testthat::test_path( - "../testdata/modis/", - "MOD13A2.A2021225.h11v05.061.2021320163751.hdf" - ) - testthat::expect_no_error( - process_modis_merge( - path = path_mod13, - date = "2021-08-13", - subdataset = "(NDVI)" - ) - ) - - # case 3: standard mcd19a2 - path_mcd19 <- - testthat::test_path( - "../testdata/modis/", - "MCD19A2.A2021227.h11v05.061.2023149160635.hdf" - ) - testthat::expect_no_error( - process_modis_merge( - path = path_mcd19, - date = "2021-08-15", - subdataset = "(Optical_Depth)" - ) - ) - - # case 3: standard mcd19a2 - path_mod09 <- - testthat::test_path( - "../testdata/modis/", - "MOD09GA.A2021227.h11v05.061.2021229035936.hdf" - ) - testthat::expect_no_error( - process_modis_merge( - path = path_mod09, - date = "2021-08-15", - subdataset = "(sur_refl_b0)" - ) - ) - - # multiple files - paths_mod13 <- list.files( - testthat::test_path("../testdata/modis/"), - pattern = "MOD13A2", - 
full.names = TRUE - ) - testthat::expect_no_error( - process_modis_merge( - path = paths_mod13, - date = "2021-08-13", - subdataset = "(NDVI)" - ) - ) - testthat::expect_error( - process_modis_merge( - path = paths_mod13, - date = "2021-08-13", - subdataset = "(NDVI)", - fun_agg = 3L - ) - ) - -}) - - -testthat::test_that("VNP46 preprocess tests", { - withr::local_package("terra") - - path_vnp46 <- - list.files( - testthat::test_path("..", "testdata", "modis"), - "^VNP46A2", - full.names = TRUE - ) - - testthat::expect_no_error( - corn <- process_blackmarble_corners() - ) - testthat::expect_error( - process_blackmarble_corners(hrange = c(99, 104)) - ) - - testthat::expect_warning( - vnp46_proc <- process_blackmarble( - path = path_vnp46[1], - tile_df = corn, - date = "2018-08-13" - ) - ) - testthat::expect_s4_class(vnp46_proc, "SpatRaster") - testthat::expect_equal(terra::nlyr(vnp46_proc), 1L) - - testthat::expect_warning( - vnp46_proc2 <- process_blackmarble( - path = path_vnp46[1], - tile_df = corn, - subdataset = c(3L, 5L), - date = "2018-08-13" - ) - ) - - testthat::expect_s4_class(vnp46_proc2, "SpatRaster") - testthat::expect_equal(terra::nlyr(vnp46_proc2), 2L) - - testthat::expect_error( - process_blackmarble( - path = path_vnp46[1], - tile_df = corn, - date = "2018~08~13" - ) - ) - -}) - - -testthat::test_that("Swath warping abides", { - withr::local_package("stars") - withr::local_package("terra") - withr::local_options(list(sf_use_s2 = FALSE)) - - path_mod06 <- - testthat::test_path( - "..", "testdata", "modis", - "MOD06_L2.A2021227.0320.061.2021227134022.hdf" - ) - path_mod06 <- - sprintf("HDF4_EOS:EOS_SWATH:%s:mod06:Cloud_Fraction_Night", path_mod06) - # internal warning from stars - testthat::expect_warning( - warped <- process_modis_warp( - path = path_mod06 - ) - ) - testthat::expect_s3_class(warped, "stars") - testthat::expect_equal( - unname(stars::st_res(warped)[1]), 0.1, tolerance = 1e-6 - ) - - path_mod06s <- - list.files( - 
testthat::test_path("..", "testdata", "modis"), - pattern = "MOD06_L2", - full.names = TRUE - ) - - testthat::expect_warning( - warped4 <- process_modis_swath( - path = path_mod06s, - date = "2021-08-15", - subdataset = c("Cloud_Fraction_Night", "Cloud_Fraction_Day") - ) - ) - testthat::expect_s4_class(warped4, "SpatRaster") - - -}) - - -testthat::test_that("Other MODIS function errors", { - withr::local_package("terra") - withr::local_package("stars") - withr::local_options(list(sf_use_s2 = FALSE)) - path_mod06 <- - testthat::test_path( - "..", "testdata", "modis", - "MOD06_L2.A2021227.0320.061.2021227134022.hdf" - ) - path_mod06e <- - sprintf("HDF4_EOS:EOS_SWATH:%s:mod06:Cloud_Fraction_Night", path_mod06) - - testthat::expect_no_error( - suppressWarnings( - process_modis_swath( - path = path_mod06, - subdataset = "Cloud_Fraction_Night", - date = "2021-08-15" - ) - ) - ) - testthat::expect_error( - process_modis_swath( - path = path_mod06, - subdataset = "Cloud_Fraction_Night", - date = "2021~08~15" - ) - ) - testthat::expect_error( - process_modis_swath( - path = path_mod06, - subdataset = "Cloud_Fraction_Night", - date = "2021-13-15" - ) - ) - testthat::expect_error( - process_modis_swath( - path = path_mod06, - subdataset = "Cloud_Fraction_Night", - date = "2021-12-45" - ) - ) -}) - - -# test Ecoregions #### -testthat::test_that("read ecoregion", { - withr::local_package("terra") - withr::local_package("sf") - withr::local_options(list(sf_use_s2 = FALSE)) - - path_eco <- testthat::test_path("..", "testdata", "eco_l3_clip.gpkg") - testthat::expect_no_error( - eco <- process_ecoregion(path_eco) - ) - - # test with cropping extent - testthat::expect_no_error( - process_ecoregion(path_eco, extent = terra::ext(eco)) - ) - ecotemp <- sf::st_read(path_eco) - # nolint start - addpoly <- - "POLYGON ((-70.2681 43.6787, -70.252234 43.677145, -70.251036 -43.680758, -70.268666 43.681505, -70.2681 43.6787))" - # nolint end - addpoly <- sf::st_as_sfc(addpoly, crs = 
"EPSG:4326") - addpoly <- sf::st_transform(addpoly, sf::st_crs(ecotemp)) - ecotemp[1, "geom"] <- addpoly - tdir <- tempdir() - sf::st_write(ecotemp, paste0(tdir, "/ecoregions.gpkg"), append = FALSE) - testthat::expect_no_error( - suppressWarnings(process_ecoregion(paste0(tdir, "/ecoregions.gpkg"))) - ) -}) - - -# test NLCD #### -testthat::test_that("process_nlcd tests", { - withr::local_package("terra") - - path_nlcd19 <- - testthat::test_path( - "..", - "testdata" - ) - - testthat::expect_no_error( - nlcd19 <- process_nlcd(path = path_nlcd19, year = 2019) - ) - # test with extent cropping - testthat::expect_no_error( - nlcd19_ext <- process_nlcd( - path = path_nlcd19, - year = 2019, - extent = terra::ext(-1580000, -1520000, 1920000, 1980000) - ) - ) - testthat::expect_s4_class(nlcd19, "SpatRaster") - testthat::expect_equal(unname(terra::metags(nlcd19, name = "year")), "2019") - - # error cases - testthat::expect_error( - process_nlcd(path = 1L) - ) - testthat::expect_error( - process_nlcd(path = "/universe/galaxy/solarsys/earth/usa.nc") - ) - testthat::expect_error( - process_nlcd(path_nlcd19, "nineteen eighty-four") - ) - testthat::expect_error( - process_nlcd(path_nlcd19, year = 2020) - ) - # make duplicate with tif and img - tdir <- tempdir() - dir.create(paste0(tdir, "/nlcd_all")) - file.create(paste0(tdir, "/nlcd_all/nlcd_2019_land_cover_20240624.tif")) - file.create(paste0(tdir, "/nlcd_all/nlcd_2019_land_cover_20240624.img")) - testthat::expect_error( - process_nlcd(path = paste0(tdir, "/nlcd_all"), year = 2019) - ) - -}) - - -# test Koppen-Geiger #### -testthat::test_that("process_koppen_geiger tests", { - withr::local_package("terra") - path_kgeiger <- - testthat::test_path("../testdata", "koppen_subset.tif") - - testthat::expect_no_error( - kgeiger <- process_koppen_geiger(path_kgeiger) - ) - - # test with cropping extent - testthat::expect_no_error( - kgeiger_ext <- process_koppen_geiger( - path_kgeiger, - extent = terra::ext(kgeiger) - ) - ) - 
testthat::expect_s4_class(kgeiger, "SpatRaster") - - path_kgeiger_f <- - testthat::test_path("../testdata", "kop", "Beck_KG_V1_future_0p5.tif") - testthat::expect_no_error( - kgeiger_f <- process_koppen_geiger(path_kgeiger_f) - ) -}) - -# test TRI #### -testthat::test_that("process_tri tests", { - withr::local_package("terra") - path_tri <- testthat::test_path("../testdata", "tri", "") - - testthat::expect_no_error( - tri_r <- process_tri(path = path_tri) - ) - testthat::expect_s4_class(tri_r, "SpatVector") - - # test with cropping extent - testthat::expect_no_error( - tri_r_ext <- process_tri( - path = path_tri, - extent = terra::ext(tri_r) - ) - ) - testthat::expect_s4_class(tri_r, "SpatVector") -}) - -# test NEI #### -testthat::test_that("process_nei tests", { - withr::local_package("terra") - - path_nei <- testthat::test_path("../testdata", "nei", "") - path_cnty <- system.file("gpkg/nc.gpkg", package = "sf") - path_cnty <- terra::vect(path_cnty) - path_cnty$GEOID <- path_cnty$FIPS - - testthat::expect_no_error( - neinc <- process_nei(path = path_nei, year = 2017, county = path_cnty) - ) - testthat::expect_s4_class(neinc, "SpatVector") - - # error cases - testthat::expect_error( - process_nei(testthat::test_path("../testdata", "modis"), year = 2017) - ) - testthat::expect_error( - process_nei(path_nei, year = 2030, county = path_cnty) - ) - testthat::expect_error( - process_nei(path_nei, year = 2020, county = NULL) - ) - testthat::expect_error( - process_nei(path_nei, year = 2020, county = array(1, 2)) - ) - names(path_cnty)[which(names(path_cnty) == "GEOID")] <- "COUNTYID" - testthat::expect_error( - process_nei(path_nei, year = 2020, county = path_cnty) - ) - testthat::expect_error( - process_nei("./EmPtY/pAtH", year = 2020, county = path_cnty) - ) -}) - - -## ephemeral: process_conformity tests -testthat::test_that("process_conformity tests", { +################################################################################ +##### process_conformity 
+testthat::test_that("process_conformity", { withr::local_package("terra") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -608,432 +133,15 @@ testthat::test_that("process_conformity tests", { }) -# test SEDAC population #### -testthat::test_that("process_sedac_population returns expected.", { - withr::local_package("terra") - paths <- list.files( +################################################################################ +##### process_collection +testthat::test_that("process_collection", { + path <- list.files( testthat::test_path( "..", "testdata", - "population" - ), - pattern = ".tif", - full.names = TRUE - ) - # expect function - expect_true( - is.function(process_sedac_population) - ) - for (p in seq_along(paths)) { - pop <- - process_sedac_population( - path = paths[p] - ) - # expect output is a SpatRaster - expect_true( - class(pop)[1] == "SpatRaster" - ) - # expect values - expect_true( - terra::hasValues(pop) - ) - # expect non-null coordinate reference system - expect_false( - is.null(terra::crs(pop)) - ) - # expect lon and lat dimensions to be > 1 - expect_false( - any(c(0, 1) %in% dim(pop)[1:2]) - ) - } - # test with cropping extent - testthat::expect_no_error( - pop_ext <- process_sedac_population( - paths[1], - extent = terra::ext(pop) - ) - ) -}) - -testthat::test_that("process_sedac_population returns null for netCDF.", { - pop <- - process_sedac_population( - testthat::test_path( - "..", - "testdata", - "population", - "pLaCeHoLdEr.nc" - ) - ) - expect_true( - is.null(pop) - ) -}) - -testthat::test_that("sedac_codes", { - string <- "2.5 minute" - testthat::expect_no_error( - code <- process_sedac_codes(string) - ) - testthat::expect_equal(code, "2pt5_min") -}) - - -# test HMS #### -testthat::test_that("process_hms with present polygons", { - withr::local_package("terra") - # expect function - testthat::expect_true( - is.function(process_hms) - ) - hms <- - process_hms( - date = c("2022-06-10", "2022-06-13"), - 
path = testthat::test_path( - "..", - "testdata", - "hms" - ) - ) - # expect output is a SpatVector or character - testthat::expect_true( - methods::is(hms, "SpatVector") - ) - # expect non-null coordinate reference system - testthat::expect_false( - is.null(terra::crs(hms)) - ) - # expect two columns - testthat::expect_true( - ncol(hms) == 2 - ) - # expect density and date column - testthat::expect_true( - all(c("Density", "Date") %in% names(hms)) - ) - # test with cropping extent - testthat::expect_no_error( - hms_ext <- process_hms( - date = c("2022-06-10", "2022-06-11"), - path = testthat::test_path( - "..", - "testdata", - "hms" - ), - extent = terra::ext(hms) - ) - ) -}) - -testthat::test_that("process_hms with missing polygons (12/31/2018).", { - withr::local_package("terra") - # expect function - testthat::expect_true( - is.function(process_hms) - ) - hms <- - process_hms( - date = c("2018-12-31", "2018-12-31"), - path = testthat::test_path( - "..", - "testdata", - "hms" - ) - ) - # expect character - testthat::expect_true(is.character(hms)) -}) - -# test GMTED #### -testthat::test_that("process_gmted returns expected.", { - withr::local_package("terra") - statistics <- c( - "Breakline Emphasis", "Systematic Subsample" - ) - resolutions <- c( - "7.5 arc-seconds", "15 arc-seconds", "30 arc-seconds" - ) - # expect function - expect_true( - is.function(process_gmted) - ) - for (s in seq_along(statistics)) { - statistic <- statistics[s] - for (r in seq_along(resolutions)) { - resolution <- resolutions[r] - gmted <- - process_gmted( - variable = c(statistic, resolution), - path = - testthat::test_path( - "..", - "testdata", - "gmted", - paste0( - process_gmted_codes( - statistic, - statistic = TRUE, - invert = FALSE - ), - process_gmted_codes( - resolution, - resolution = TRUE, - invert = FALSE - ), - "_grd" - ) - ) - ) - # expect output is a SpatRaster - expect_true( - class(gmted)[1] == "SpatRaster" - ) - # expect values - expect_true( - 
terra::hasValues(gmted) - ) - # expect non-null coordinate reference system - expect_false( - is.null(terra::crs(gmted)) - ) - # expect lon and lat dimensions to be > 1 - expect_false( - any(c(0, 1) %in% dim(gmted)[1:2]) - ) - } - } - # test with cropping extent - testthat::expect_no_error( - gmted_ext <- - process_gmted( - variable = c("Breakline Emphasis", "7.5 arc-seconds"), - path = - testthat::test_path( - "..", - "testdata", - "gmted", - "be75_grd" - ), - ext = terra::ext(gmted) - ) - ) -}) - -testthat::test_that("import_gmted returns error with non-vector variable.", { - expect_error( - gmted <- - process_gmted( - variable <- "Breakline Emphasis; 7.5 arc-seconds", - path = testthat::test_path( - "..", - "testdata", - "gmted" - ) - ) - ) -}) - -testthat::test_that("gmted_codes inversion", { - teststring <- "mx" - testthat::expect_no_error( - statorig <- process_gmted_codes( - teststring, - statistic = TRUE, - resolution = FALSE, - invert = TRUE - ) - ) - testthat::expect_equal(statorig, "Maximum Statistic") - - teststring <- "75" - testthat::expect_no_error( - resoorig <- process_gmted_codes( - teststring, - statistic = FALSE, - resolution = TRUE, - invert = TRUE - ) - ) - testthat::expect_equal(resoorig, "7.5 arc-seconds") -}) - - -## test NARR #### -testthat::test_that("process_narr returns expected.", { - withr::local_package("terra") - variables <- c( - "weasd", - "omega" - ) - # expect function - expect_true( - is.function(process_narr) - ) - for (v in seq_along(variables)) { - narr <- - process_narr( - date = c("2018-01-01", "2018-01-01"), - variable = variables[v], - path = - testthat::test_path( - "..", - "testdata", - "narr", - variables[v] - ) - ) - # expect output is SpatRaster - expect_true( - class(narr)[1] == "SpatRaster" - ) - # expect values - expect_true( - terra::hasValues(narr) - ) - # expect non-null coordinate reference system - expect_false( - is.null(terra::crs(narr)) - ) - # expect lon and lat dimensions to be > 1 - expect_false( - 
any(c(0, 1) %in% dim(narr)[1:2]) - ) - # expect non-numeric and non-empty time - expect_false( - any(c("", 0) %in% terra::time(narr)) - ) - # expect dimensions according to levels - if (variables[v] == "weasd") { - expect_true( - dim(narr)[3] == 1 - ) - } else if (variables[v] == "omega") { - expect_true( - dim(narr)[3] == 29 - ) - } - } - # test with cropping extent - testthat::expect_no_error( - narr_ext <- - process_narr( - date = c("2018-01-01", "2018-01-01"), - variable = "omega", - path = - testthat::test_path( - "..", - "testdata", - "narr", - "omega" - ), - extent = terra::ext(narr) - ) - ) -}) - -# test GEOS-CF #### -testthat::test_that("process_geos returns expected.", { - withr::local_package("terra") - collections <- c( - "a", - "c" - ) - # expect function - expect_true( - is.function(process_geos) - ) - for (c in seq_along(collections)) { - collection <- collections[c] - geos <- - process_geos( - date = c("2018-01-01", "2018-01-01"), - variable = "O3", - path = - testthat::test_path( - "..", - "testdata", - "geos", - collection - ) - ) - # expect output is SpatRaster - expect_true( - class(geos)[1] == "SpatRaster" - ) - # expect values - expect_true( - terra::hasValues(geos) - ) - # expect non-null coordinate reference system - expect_false( - terra::crs(geos) == "" - ) - # expect lon and lat dimensions to be > 1 - expect_false( - any(c(0, 1) %in% dim(geos)[1:2]) - ) - # expect non-numeric and non-empty time - expect_false( - any(c("", 0) %in% terra::time(geos)) - ) - # expect time dimension is POSIXt for hourly - expect_true( - "POSIXt" %in% class(terra::time(geos)) - ) - # expect seconds in time information - expect_true( - "seconds" %in% terra::timeInfo(geos) - ) - # expect dimensions according to collection - if (collection == "a") { - expect_true( - dim(geos)[3] == 1 - ) - } else if (collection == "c") { - expect_true( - dim(geos)[3] == 5 - ) - } - } - # test with cropping extent - testthat::expect_no_error( - geos_ext <- process_geos( - date = 
c("2018-01-01", "2018-01-01"), - variable = "O3", - path = - testthat::test_path( - "..", - "testdata", - "geos", - "c" - ), - extent = terra::ext(geos) - ) - ) -}) - -testthat::test_that("process_geos expected errors.", { - # expect error without variable - expect_error( - process_geos() - ) - # expect error on directory without data - expect_error( - process_geos( - variable = "O3", - path = "./" - ) - ) -}) - -# test support functions #### -testthat::test_that("proccess support functions return expected.", { - path <- list.files( - testthat::test_path( - "..", - "testdata", - "geos", - "a" + "geos", + "a" ), full.names = TRUE ) @@ -1066,7 +174,9 @@ testthat::test_that("proccess support functions return expected.", { ) }) -testthat::test_that("process_locs_vector vector data and missing columns.", { +################################################################################ +##### process_locs_vector +testthat::test_that("process_locs_vector", { withr::local_package("terra") ncp <- data.frame(lon = -78.8277, lat = 35.95013) ncp$site_id <- "3799900018810101" @@ -1145,621 +255,9 @@ testthat::test_that("process_locs_vector vector data and missing columns.", { ) }) -# test AQS #### -testthat::test_that("process_aqs", { - withr::local_package("terra") - withr::local_package("data.table") - withr::local_package("sf") - withr::local_package("dplyr") - withr::local_options(list(sf_use_s2 = FALSE)) - - aqssub <- testthat::test_path( - "..", - "testdata", - "aqs_daily_88101_triangle.csv" - ) - testd <- testthat::test_path( - "..", "testdata" - ) - - # main test - testthat::expect_no_error( - aqsft <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "date-location", - return_format = "terra" - ) - ) - testthat::expect_no_error( - aqsst <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "available-data", - return_format = "terra" - ) - ) - testthat::expect_no_error( - aqslt <- process_aqs( - path = aqssub, - 
date = c("2022-02-04", "2022-02-28"), - mode = "location", - return_format = "terra" - ) - ) - - # expect - testthat::expect_s4_class(aqsft, "SpatVector") - testthat::expect_s4_class(aqsst, "SpatVector") - testthat::expect_s4_class(aqslt, "SpatVector") - - testthat::expect_no_error( - aqsfs <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "date-location", - return_format = "sf" - ) - ) - testthat::expect_no_error( - aqsss <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "available-data", - return_format = "sf" - ) - ) - testthat::expect_no_error( - aqsls <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "location", - return_format = "sf" - ) - ) - testthat::expect_s3_class(aqsfs, "sf") - testthat::expect_s3_class(aqsss, "sf") - testthat::expect_s3_class(aqsls, "sf") - - testthat::expect_no_error( - aqsfd <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "date-location", - return_format = "data.table" - ) - ) - testthat::expect_no_error( - aqssd <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "available-data", - return_format = "data.table" - ) - ) - testthat::expect_no_error( - aqssdd <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "available-data", - data_field = "Arithmetic.Mean", - return_format = "data.table" - ) - ) - testthat::expect_no_error( - aqsld <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "location", - return_format = "data.table" - ) - ) - testthat::expect_no_error( - aqsldd <- process_aqs( - path = aqssub, - date = c("2022-02-04", "2022-02-28"), - mode = "location", - data_field = "Arithmetic.Mean", - return_format = "data.table" - ) - ) - testthat::expect_s3_class(aqsfd, "data.table") - testthat::expect_s3_class(aqssd, "data.table") - testthat::expect_s3_class(aqssdd, "data.table") - testthat::expect_s3_class(aqsld, 
"data.table") - testthat::expect_s3_class(aqsldd, "data.table") - - testthat::expect_no_error( - aqssf <- process_aqs( - path = testd, - date = c("2022-02-04", "2022-02-28"), - mode = "location", - return_format = "sf" - ) - ) - - tempd <- tempdir() - testthat::expect_error( - process_aqs( - path = tempd, - date = c("2022-02-04", "2022-02-28"), - return_format = "sf" - ) - ) - - # expect - testthat::expect_s3_class(aqssf, "sf") - - # error cases - testthat::expect_error( - process_aqs(testthat::test_path("../testdata", "modis")) - ) - testthat::expect_error( - process_aqs(path = 1L) - ) - testthat::expect_error( - process_aqs(path = aqssub, date = c("January", "Januar")) - ) - testthat::expect_error( - process_aqs(path = aqssub, date = c("2021-08-15")) - ) - testthat::expect_error( - process_aqs(path = aqssub, date = NULL) - ) - testthat::expect_no_error( - process_aqs( - path = aqssub, date = c("2022-02-04", "2022-02-28"), - mode = "available-data", return_format = "sf", - extent = c(-79, 33, -78, 36) - ) - ) - testthat::expect_no_error( - process_aqs( - path = aqssub, date = c("2022-02-04", "2022-02-28"), - mode = "available-data", return_format = "sf", - extent = c(-79, 33, -78, 36) - ) - ) - testthat::expect_warning( - process_aqs( - path = aqssub, date = c("2022-02-04", "2022-02-28"), - mode = "available-data", return_format = "data.table", - extent = c(-79, -78, 33, 36) - ), - "Extent is not applicable for data.table. Returning data.table..." 
- ) -}) - -# test SEDAC GRoads #### -testthat::test_that("test process_sedac_groads", { - withr::local_package("terra") - - # main test - testthat::expect_no_error( - groads <- process_sedac_groads( - path = testthat::test_path("../testdata/groads_test.shp") - ) - ) - # expect - testthat::expect_s4_class(groads, "SpatVector") - # error cases - testthat::expect_error( - process_sedac_groads(path = 1L) - ) - # test with cropping extent - testthat::expect_no_error( - groads_ext <- process_sedac_groads( - path = testthat::test_path("../testdata/groads_test.shp"), - extent = terra::ext(groads) - ) - ) -}) - -# test MERRA2 #### -testthat::test_that("process_merra2 returns as expected.", { - withr::local_package("terra") - #* indicates three dimensional data that has subset to single - #* pressure level for test data set - collection <- c( - "inst1_2d_int_Nx", "inst3_2d_gas_Nx", "inst3_3d_chm_Nv", #* - "inst6_3d_ana_Np", #* - "statD_2d_slv_Nx", "tavg1_2d_chm_Nx", "tavg3_3d_udt_Np" #* - ) - variable <- c( - "CPT", "AODANA", "AIRDENS", #* - "SLP", #* - "HOURNORAIN", "COCL", "DUDTANA" #* - ) - merra2_df <- data.frame(collection, variable) - # expect function - expect_true( - is.function(process_merra2) - ) - for (c in seq_along(merra2_df$collection)) { - merra2 <- - process_merra2( - date = c("2018-01-01", "2018-01-01"), - variable = merra2_df$variable[c], - path = - testthat::test_path( - "..", - "testdata", - "merra2", - merra2_df$collection[c] - ) - ) - # expect output is SpatRaster - expect_true( - class(merra2)[1] == "SpatRaster" - ) - # expect values - expect_true( - terra::hasValues(merra2) - ) - # expect non-null coordinate reference system - expect_false( - terra::crs(merra2) == "" - ) - # expect lon and lat dimensions to be > 1 - expect_false( - any(c(0, 1) %in% dim(merra2)[1:2]) - ) - # expect non-numeric and non-empty time - expect_false( - any(c("", 0) %in% terra::time(merra2)) - ) - # expect time dimension is POSIXt for hourly - expect_true( - "POSIXt" %in% 
class(terra::time(merra2)) - ) - # expect seconds in time information - expect_true( - "seconds" %in% terra::timeInfo(merra2) - ) - # expect 8 levels for 3 hourly data - expect_true( - all(dim(merra2) == c(2, 3, 1)) - ) - } - class(merra2) - # test with cropping extent - testthat::expect_no_error( - merra2_ext <- process_merra2( - date = c("2018-01-01", "2018-01-01"), - variable = "CPT", - path = - testthat::test_path( - "..", - "testdata", - "merra2", - "inst1_2d_int_Nx" - ), - extent = terra::ext(merra2) - ) - ) -}) - -# test GridMET #### -testthat::test_that("process_gridmet returns expected.", { - withr::local_package("terra") - variable <- "Precipitation" - # expect function - expect_true( - is.function(process_gridmet) - ) - gridmet <- - process_gridmet( - date = c("2018-01-03", "2018-01-03"), - variable = variable, - path = - testthat::test_path( - "..", - "testdata", - "gridmet", - "pr" - ) - ) - # expect output is SpatRaster - expect_true( - class(gridmet)[1] == "SpatRaster" - ) - # expect values - expect_true( - terra::hasValues(gridmet) - ) - # expect non-null coordinate reference system - expect_false( - is.null(terra::crs(gridmet)) - ) - # expect lon and lat dimensions to be > 1 - expect_false( - any(c(0, 1) %in% dim(gridmet)[1:2]) - ) - # expect non-numeric and non-empty time - expect_false( - any(c("", 0) %in% terra::time(gridmet)) - ) - # expect dimensions according to levels - expect_true( - dim(gridmet)[3] == 1 - ) - # test with cropping extent - testthat::expect_no_error( - gridmet_ext <- process_gridmet( - date = c("2018-01-03", "2018-01-03"), - variable = "Precipitation", - path = - testthat::test_path( - "..", - "testdata", - "gridmet", - "pr" - ), - extent = terra::ext(gridmet) - ) - ) -}) - -# test TerraClimate #### -testthat::test_that("process_terraclimate returns expected.", { - withr::local_package("terra") - variable <- "ppt" - # expect function - expect_true( - is.function(process_terraclimate) - ) - terraclimate <- - 
process_terraclimate( - date = c("2018-01-01", "2018-01-01"), - variable = variable, - path = - testthat::test_path( - "..", - "testdata", - "terraclimate", - "ppt" - ) - ) - # expect output is SpatRaster - expect_true( - class(terraclimate)[1] == "SpatRaster" - ) - # expect values - expect_true( - terra::hasValues(terraclimate) - ) - # expect non-null coordinate reference system - expect_false( - is.null(terra::crs(terraclimate)) - ) - # expect lon and lat dimensions to be > 1 - expect_false( - any(c(0, 1) %in% dim(terraclimate)[1:2]) - ) - # expect non-numeric and non-empty time - expect_false( - any(c("", 0) %in% terra::time(terraclimate)) - ) - # expect dimensions according to levels - expect_true( - dim(terraclimate)[3] == 1 - ) - # test with cropping extent - testthat::expect_no_error( - terraclimate_ext <- process_terraclimate( - date = c("2018-01-01", "2018-01-01"), - variable = "ppt", - path = - testthat::test_path( - "..", - "testdata", - "terraclimate", - "ppt" - ), - extent = terra::ext(terraclimate) - ) - ) -}) - -testthat::test_that("gridmet and terraclimate auxiliary functions.", { - # gridmet - gc1 <- process_gridmet_codes("all") - expect_true(ncol(gc1) == 2) - gc2 <- process_gridmet_codes("sph", invert = TRUE) - expect_true(class(gc2) == "character") - expect_true(nchar(gc2) > 7) - gc3 <- process_gridmet_codes("Near-Surface Specific Humidity") - expect_true(class(gc3) == "character") - expect_true(nchar(gc3) < 7) - # terraclimate - tc1 <- process_terraclimate_codes("all") - expect_true(ncol(gc1) == 2) - tc2 <- process_terraclimate_codes("aet", invert = TRUE) - expect_true(class(gc2) == "character") - expect_true(nchar(gc2) > 7) - tc3 <- process_terraclimate_codes("Actual Evapotranspiration") - expect_true(class(gc3) == "character") - expect_true(nchar(gc3) < 7) - # process_variable_codes - expect_no_error(process_variable_codes("sph", "gridmet")) - expect_no_error( - process_variable_codes("Near-Surface Specific Humidity", "gridmet") - ) - 
expect_error( - process_variable_codes("error", "gridmet") - ) - expect_no_error(process_variable_codes("aet", "terraclimate")) - expect_no_error( - process_variable_codes("Actual Evapotranspiration", "terraclimate") - ) - expect_error( - process_variable_codes("error", "terraclimate") - ) -}) - -# test PRISM #### -testthat::test_that( - "process_prism returns a SpatRaster object with correct metadata", - { - # Set up test data - withr::local_package("terra") - path <- testthat::test_path( - "..", "testdata", "prism", "PRISM_tmin_30yr_normal_4kmD1_0228_bil_test.nc" - ) - path_dir <- testthat::test_path( - "..", "testdata", "prism" - ) - element <- "tmin" - time <- "0228" - - # Call the function - testthat::expect_no_error(result <- process_prism(path, element, time)) - testthat::expect_no_error(result2 <- process_prism(path_dir, element, time)) - - # Check the return type - testthat::expect_true(inherits(result, "SpatRaster")) - testthat::expect_true(inherits(result2, "SpatRaster")) - - # Check the metadata - testthat::expect_equal(unname(terra::metags(result)["time"]), time) - testthat::expect_equal(unname(terra::metags(result)["element"]), element) - - # Set up test data - path_bad <- "/path/to/nonexistent/folder" - element_bad <- "invalid_element" - time_bad <- "invalid_time" - - # Call the function and expect an error - testthat::expect_error(process_prism(NULL, element, time)) - testthat::expect_error( - testthat::expect_warning( - process_prism(path_bad, element, time) - ) - ) - testthat::expect_error(process_prism(path_dir, element_bad, time)) - testthat::expect_error(process_prism(path_dir, element, time_bad)) - - # test with cropping extent - testthat::expect_no_error( - result_ext <- process_prism( - path, - element, - time, - extent = terra::ext(result) - ) - ) - } -) - - -# test CropScape #### -testthat::test_that( - "process_cropscape returns a SpatRaster object with correct metadata", { - # Set up test data - withr::local_package("terra") - filepath 
<- - testthat::test_path("..", "testdata/cropscape/cdl_30m_r_nc_2019_sub.tif") - dirpath <- testthat::test_path("..", "testdata/cropscape") - year <- 2019 - - # Call the function - testthat::expect_no_error(result <- process_cropscape(filepath, year)) - testthat::expect_no_error(process_cropscape(dirpath, year)) - - # test with cropping extent - testthat::expect_no_error( - result_ext <- process_cropscape( - filepath, year, extent = terra::ext(result) - ) - ) - - # Check the return type - testthat::expect_true(inherits(result, "SpatRaster")) - - # Check the metadata - testthat::expect_equal( - unname(terra::metags(result)["year"]), - as.character(year) - ) - - # error cases - testthat::expect_error(process_cropscape(path = 0, year = "MILLENNIUM")) - testthat::expect_error( - process_cropscape(path = "/home/some/path", year = "MILLENNIUM") - ) - } -) - -# test HUC #### -testthat::test_that("process_huc", - { - withr::local_package("terra") - withr::local_package("sf") - withr::local_package("nhdplusTools") - withr::local_options(list(sf_use_s2 = FALSE)) - # Set up test data - path <- testthat::test_path( - "..", "testdata", "huc12", "NHDPlus_test.gpkg" - ) - - # Call the function - testthat::expect_error(process_huc(path)) - testthat::expect_no_error( - result <- - process_huc( - path, - layer_name = "NHDPlus_test", - huc_level = "HUC_12", - huc_header = "030202" - ) - ) - testthat::expect_true(inherits(result, "SpatVector")) - - # query case - testthat::expect_no_error( - result <- - process_huc( - path, - layer_name = "NHDPlus_test", - huc_level = "HUC_12", - huc_header = "030202" - ) - ) - testthat::expect_true(inherits(result, "SpatVector")) - - testthat::expect_error( - process_huc( - path, - layer_name = "HUc", - huc_level = "HUC_12", - huc_header = "030202" - ) - ) - - # Set up test data - path2 <- testthat::test_path( - "..", "testdata", "huc12" - ) - - # Call the function and expect an error - testthat::expect_error(process_huc(path2)) - - # test with 
cropping extent - testthat::expect_no_error( - huc_ext <- process_huc( - path, - layer_name = "NHDPlus_test", - huc_level = "HUC_12", - huc_header = "030202", - extent = terra::ext(result) - ) - ) - } -) - - -# AUX tests #### -testthat::test_that("loc_radius tests", { +################################################################################ +##### process_locs_vector + process_locs_radius +testthat::test_that("process_locs_vector + process_locs_radius", { withr::local_package("terra") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -1827,8 +325,9 @@ testthat::test_that("process_locs_vector tests", { testthat::expect_true(terra::geomtype(dfdftrb) == "polygons") }) -# apply_extent -testthat::test_that("apply_extent tests", { +################################################################################ +##### apply extent +testthat::test_that("apply_extent", { withr::local_package("terra") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -1858,3 +357,4 @@ testthat::test_that("apply_extent tests", { testthat::expect_s3_class(dfsftr, "sf") testthat::expect_s4_class(dfdftr, "SpatVector") }) +# nolint end diff --git a/tests/testthat/test-process_olm.R b/tests/testthat/test-process_olm.R deleted file mode 100644 index 8ea5d54d..00000000 --- a/tests/testthat/test-process_olm.R +++ /dev/null @@ -1,21 +0,0 @@ - -# test OpenLandMap #### -# nolint start -testthat::test_that("process_olm", { - withr::local_package("terra") - tmwm <- testthat::test_path("..", "testdata", "openlandmap", - "no2_s5p.l3.trop.tmwm.p50_p90_2km_a_20180501_20221130_go_epsg.4326_v20221219_test.tif") - testthat::expect_no_error( - olm <- process_olm(path = tmwm) - ) - testthat::expect_s4_class(olm, "SpatRaster") - testthat::expect_error( - process_olm(path = 1L) - ) - - # test with cropping extent - testthat::expect_no_error( - olm_ext <- process_olm(path = tmwm, extent = terra::ext(olm)) - ) -}) -# nolint end diff --git 
a/tests/testthat/test-sedc.R b/tests/testthat/test-sedc.R new file mode 100644 index 00000000..92d73fcd --- /dev/null +++ b/tests/testthat/test-sedc.R @@ -0,0 +1,76 @@ +################################################################################ +##### unit and integration tests for Sum of Exponential Decay functions + +################################################################################ +##### calc_sedc +testthat::test_that("calc_sedc", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_package("dplyr") + withr::local_package("tidyr") + withr::local_package("data.table") + withr::local_options(sf_use_s2 = FALSE) + + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + ncp$time <- 2018L + ncpt <- + terra::vect(ncp, geom = c("lon", "lat"), + keepgeom = TRUE, crs = "EPSG:4326") + path_tri <- testthat::test_path("..", "testdata", "tri") + + testthat::expect_no_error( + tri_r <- process_tri(path = path_tri, year = 2018) + ) + tri_r <- terra::project(tri_r, terra::crs(ncpt)) + + targcols <- grep("FUGITIVE_", names(tri_r), value = TRUE) + testthat::expect_no_error( + tri_sedc <- + calc_sedc( + locs = ncpt, + from = tri_r, + locs_id = "site_id", + sedc_bandwidth = 30000, + target_fields = targcols + ) + ) + testthat::expect_s3_class(tri_sedc, "data.frame") + + testthat::expect_no_error( + calc_sedc( + locs = sf::st_as_sf(ncpt), + from = sf::st_as_sf(tri_r), + locs_id = "site_id", + sedc_bandwidth = 30000, + target_fields = targcols + ) + ) + + # with geometry + testthat::expect_no_error( + tri_sedc_geom <- calc_sedc( + locs = ncpt, + from = tri_r, + locs_id = "site_id", + sedc_bandwidth = 30000, + target_fields = targcols, + geom = TRUE + ) + ) + testthat::expect_s4_class(tri_sedc_geom, "SpatVector") + + # warning case: duplicate field names between locs and from + ncpta <- ncpt + ncpta$YEAR <- 2018 + testthat::expect_warning( + calc_sedc( + locs = ncpta, + from = sf::st_as_sf(tri_r), + locs_id 
= "site_id", + sedc_bandwidth = 30000, + target_fields = targcols + ) + ) + +}) diff --git a/tests/testthat/test-manipulate_spacetime_data.R b/tests/testthat/test-spacetime.R similarity index 88% rename from tests/testthat/test-manipulate_spacetime_data.R rename to tests/testthat/test-spacetime.R index b22a2ee1..5481efc3 100644 --- a/tests/testthat/test-manipulate_spacetime_data.R +++ b/tests/testthat/test-spacetime.R @@ -1,3 +1,8 @@ +################################################################################ +##### unit and integration tests for spacetime manipulation functions + +################################################################################ +##### check_mysftime testthat::test_that("check_mysftime works as expected", { withr::local_package("data.table") withr::local_package("sf") @@ -70,8 +75,9 @@ testthat::test_that("check_mysftime works as expected", { ) }) - -testthat::test_that("check_mysf works as expected", { +################################################################################ +##### check_mysf +testthat::test_that("check_mysf", { withr::local_package("data.table") withr::local_package("sf") withr::local_options(list(sf_use_s2 = FALSE)) @@ -128,7 +134,9 @@ testthat::test_that("check_mysf works as expected", { ) }) -testthat::test_that("rename_time works as expected", { +################################################################################ +##### rename_time +testthat::test_that("rename_time", { withr::local_package("data.table") withr::local_package("sf") withr::local_package("sftime") @@ -154,7 +162,9 @@ testthat::test_that("rename_time works as expected", { ) }) -testthat::test_that("dt_as_mysftime works as expected", { +################################################################################ +##### dt_as_mysftime +testthat::test_that("dt_as_mysftime", { # open testing data stdata <- data.table::fread(paste0( testthat::test_path("..", "testdata/", ""), @@ -197,7 +207,9 @@ 
testthat::test_that("dt_as_mysftime works as expected", { ) }) -testthat::test_that("as_mysftime works as expected", { +################################################################################ +##### as_mysftime +testthat::test_that("as_mysftime", { withr::local_package("terra") withr::local_package("data.table") # open testing data @@ -329,8 +341,9 @@ testthat::test_that("as_mysftime works as expected", { ) }) - -testthat::test_that("sftime_as_spatvector as expected", { +################################################################################ +##### sftime_as_spatvector +testthat::test_that("sftime_as_spatvector", { # open testing data stdata <- data.table::fread(paste0( testthat::test_path("..", "testdata/", ""), @@ -350,7 +363,9 @@ testthat::test_that("sftime_as_spatvector as expected", { testthat::expect_error(sftime_as_spatvector(stdata)) }) -testthat::test_that("sf_as_mysftime works as expected", { +################################################################################ +##### sf_as_mysftime +testthat::test_that("sf_as_mysftime", { withr::local_package("data.table") withr::local_package("sf") withr::local_package("sftime") @@ -373,7 +388,9 @@ testthat::test_that("sf_as_mysftime works as expected", { ) }) -testthat::test_that("sftime_as_mysftime works as expected", { +################################################################################ +##### sftime_as_mysftime +testthat::test_that("sftime_as_mysftime", { withr::local_package("data.table") withr::local_package("sf") withr::local_package("sftime") @@ -403,8 +420,9 @@ testthat::test_that("sftime_as_mysftime works as expected", { ) }) - -testthat::test_that("spatraster_as_sftime works as expected", { +################################################################################ +##### spatraster_as_sftime +testthat::test_that("spatraster_as_sftime", { withr::local_package("terra") withr::local_package("sftime") withr::local_options(list(sf_use_s2 = FALSE)) @@ -431,8 +449,9 
@@ testthat::test_that("spatraster_as_sftime works as expected", { ) }) - -testthat::test_that("spatrds_as_sftime works as expected", { +################################################################################ +##### spatrds_as_sftime +testthat::test_that("spatrds_as_sftime", { withr::local_package("terra") withr::local_package("sftime") withr::local_options(list(sf_use_s2 = FALSE)) @@ -471,8 +490,9 @@ testthat::test_that("spatrds_as_sftime works as expected", { testthat::expect_equal(attributes(mysft)$time, "date") }) - -testthat::test_that("sftime_as_sf works as expected", { +################################################################################ +##### sftime_as_sf +testthat::test_that("sftime_as_sf", { withr::local_package("data.table") withr::local_package("sf") withr::local_package("sftime") @@ -503,7 +523,9 @@ testthat::test_that("sftime_as_sf works as expected", { ) }) -testthat::test_that("sftime_as_sf works as expected", { +################################################################################ +##### sftime_as_sf +testthat::test_that("sftime_as_sf", { withr::local_package("data.table") withr::local_package("sf") withr::local_package("sftime") @@ -533,8 +555,9 @@ testthat::test_that("sftime_as_sf works as expected", { ) }) - -testthat::test_that("sftime_as_spatraster works as expected", { +################################################################################ +##### sftime_as_spatraster +testthat::test_that("sftime_as_spatraster", { withr::local_package("terra") withr::local_package("sftime") withr::local_options(list(sf_use_s2 = FALSE)) @@ -556,8 +579,9 @@ testthat::test_that("sftime_as_spatraster works as expected", { ) }) - -testthat::test_that("sftime_as_spatrds works as expected", { +################################################################################ +##### sftime_as_spatrds +testthat::test_that("sftime_as_spatrds", { withr::local_package("terra") withr::local_package("sftime") 
withr::local_options(list(sf_use_s2 = FALSE)) diff --git a/tests/testthat/test-list_stac_files.R b/tests/testthat/test-stac.R similarity index 74% rename from tests/testthat/test-list_stac_files.R rename to tests/testthat/test-stac.R index 1b43a10b..ffb7af82 100644 --- a/tests/testthat/test-list_stac_files.R +++ b/tests/testthat/test-stac.R @@ -1,5 +1,9 @@ +################################################################################ +##### unit and integration tests for rstac listing functions -testthat::test_that("list_stac_files returns character vector of file links", { +################################################################################ +##### list_stac_files +testthat::test_that("list_stac_files", { withr::local_package("rstac") # Set up test data stac_json <- diff --git a/tests/testthat/test-terraclimate.R b/tests/testthat/test-terraclimate.R new file mode 100644 index 00000000..1d6756bd --- /dev/null +++ b/tests/testthat/test-terraclimate.R @@ -0,0 +1,222 @@ +################################################################################ +##### unit and integration tests for Climatology Group TerraClimate functions + +################################################################################ +##### download_terraclimate +testthat::test_that("download_terraclimate (no errors)", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + year_start <- 2018 + year_end <- 2023 + variables <- "Precipitation" + directory_to_save <- paste0(tempdir(), "/terracclimate/") + # run download function + download_data(dataset_name = "terraclimate", + year = c(year_start, year_end), + variables = variables, + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE) + # define path with commands + commands_path <- paste0(directory_to_save, + "/terraclimate_", + year_start, "_", year_end, + "_curl_commands.txt") + # import commands + commands <- read_commands(commands_path = commands_path) + 
# extract urls + urls <- extract_urls(commands = commands, position = 6) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 5L, method = "HEAD") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +testthat::test_that("download_terraclimate (expected errors - years)", { + testthat::expect_error( + download_data( + dataset_name = "terraclimate", + variables = "Precipitation", + year = c(10, 11), + acknowledgement = TRUE, + directory_to_save = paste0(tempdir(), "/epa/") + ) + ) +}) + +testthat::test_that("download_terraclimate (expected errors - variables)", { + testthat::expect_error( + download_data( + dataset_name = "gridmet", + variables = "temp", + year = c(2018, 2018), + acknowledgement = TRUE, + directory_to_save = paste0(tempdir(), "/epa/") + ) + ) +}) + +################################################################################ +##### process_terraclimate +testthat::test_that("process_terraclimate", { + withr::local_package("terra") + variable <- "ppt" + # expect function + expect_true( + is.function(process_terraclimate) + ) + terraclimate <- + process_terraclimate( + date = c("2018-01-01", "2018-01-01"), + variable = variable, + path = + testthat::test_path( + "..", + "testdata", + "terraclimate", + "ppt" + ) + ) + # expect output is SpatRaster + expect_true( + class(terraclimate)[1] == "SpatRaster" + ) + # expect values + expect_true( + terra::hasValues(terraclimate) + ) + # expect non-null coordinate reference system + expect_false( + is.null(terra::crs(terraclimate)) + ) + # expect lon and lat dimensions to be > 1 + expect_false( + any(c(0, 1) %in% dim(terraclimate)[1:2]) + ) + # expect non-numeric and non-empty time + expect_false( + any(c("", 0) %in% terra::time(terraclimate)) + ) + # expect dimensions 
according to levels + expect_true( + dim(terraclimate)[3] == 1 + ) + # test with cropping extent + testthat::expect_no_error( + terraclimate_ext <- process_terraclimate( + date = c("2018-01-01", "2018-01-01"), + variable = "ppt", + path = + testthat::test_path( + "..", + "testdata", + "terraclimate", + "ppt" + ), + extent = terra::ext(terraclimate) + ) + ) +}) + +testthat::test_that("process_terraclimate_codes", { + # terraclimate + tc1 <- process_terraclimate_codes("all") + expect_true(ncol(tc1) == 2) + tc2 <- process_terraclimate_codes("aet", invert = TRUE) + expect_true(class(tc2) == "character") + expect_true(nchar(tc2) > 7) + tc3 <- process_terraclimate_codes("Actual Evapotranspiration") + expect_true(class(tc3) == "character") + expect_true(nchar(tc3) < 7) + # process_variable_codes + expect_no_error(process_variable_codes("aet", "terraclimate")) + expect_no_error( + process_variable_codes("Actual Evapotranspiration", "terraclimate") + ) + expect_error( + process_variable_codes("error", "terraclimate") + ) +}) + +################################################################################ +##### calc_terraclimate +## 16. 
TerraClimate #### +testthat::test_that("calc_terraclimate", { + withr::local_package("terra") + withr::local_package("data.table") + radii <- c(0, 1000) + ncp <- data.frame(lon = -78.8277, lat = 35.95013) + ncp$site_id <- "3799900018810101" + # expect function + expect_true( + is.function(calc_terraclimate) + ) + for (r in seq_along(radii)) { + terraclimate <- + process_terraclimate( + date = c("2018-01-01", "2018-01-01"), + variable = "Precipitation", + path = + testthat::test_path( + "..", + "testdata", + "terraclimate", + "ppt" + ) + ) + terraclimate_covariate <- + calc_terraclimate( + from = terraclimate, + locs = data.table::data.table(ncp), + locs_id = "site_id", + radius = radii[r], + fun = "mean" + ) + # set column names + terraclimate_covariate <- calc_setcolumns( + from = terraclimate_covariate, + lag = 0, + dataset = "terraclimate", + locs_id = "site_id" + ) + # expect output is data.frame + expect_true( + class(terraclimate_covariate) == "data.frame" + ) + # expect 3 columns + expect_true( + ncol(terraclimate_covariate) == 3 + ) + # expect numeric value + expect_true( + class(terraclimate_covariate[, 3]) == "numeric" + ) + # expect date and time column + expect_true( + nchar(terraclimate_covariate$time)[1] == 6 + ) + } + # with included geometry + testthat::expect_no_error( + terraclimate_covariate_geom <- calc_terraclimate( + from = terraclimate, + locs = ncp, + locs_id = "site_id", + radius = 0, + fun = "mean", + geom = TRUE + ) + ) + testthat::expect_equal( + ncol(terraclimate_covariate_geom), 3 + ) + testthat::expect_true( + "SpatVector" %in% class(terraclimate_covariate_geom) + ) +}) diff --git a/tests/testthat/test-tri.R b/tests/testthat/test-tri.R new file mode 100644 index 00000000..d35a7ded --- /dev/null +++ b/tests/testthat/test-tri.R @@ -0,0 +1,138 @@ +################################################################################ +##### unit and integration tests for U.S. 
EPA TRI functions + +################################################################################ +##### download_tri +testthat::test_that("download_tri", { + withr::local_package("httr") + withr::local_package("stringr") + # function parameters + directory_to_save <- paste0(tempdir(), "/tri/") + # run download function + download_data(dataset_name = "tri", + directory_to_save = directory_to_save, + acknowledgement = TRUE, + download = FALSE, + remove_command = FALSE) + year_start <- 2018L + year_end <- 2022L + + # define file path with commands + commands_path <- paste0( + directory_to_save, + "TRI_", + year_start, "_", year_end, + "_", + Sys.Date(), + "_curl_commands.txt" + ) + + # import commands + commands <- read_commands(commands_path = commands_path) + # extract urls + urls <- extract_urls(commands = commands, position = 3) + # check HTTP URL status + url_status <- check_urls(urls = urls, size = 1L, method = "SKIP") + # implement unit tests + test_download_functions(directory_to_save = directory_to_save, + commands_path = commands_path, + url_status = url_status) + # remove file with commands after test + file.remove(commands_path) + unlink(directory_to_save, recursive = TRUE) +}) + +################################################################################ +##### process_tri +testthat::test_that("process_tri", { + withr::local_package("terra") + path_tri <- testthat::test_path("../testdata", "tri", "") + + testthat::expect_no_error( + tri_r <- process_tri(path = path_tri) + ) + testthat::expect_s4_class(tri_r, "SpatVector") + + # test with cropping extent + testthat::expect_no_error( + tri_r_ext <- process_tri( + path = path_tri, + extent = terra::ext(tri_r) + ) + ) + testthat::expect_s4_class(tri_r, "SpatVector") +}) + +################################################################################ +##### calc_tri +testthat::test_that("calc_tri", { + withr::local_package("terra") + withr::local_package("sf") + withr::local_package("dplyr") + 
withr::local_package("tidyr") + withr::local_package("data.table") + withr::local_options(sf_use_s2 = FALSE) + + ncp <- data.frame(lon = c(-78.8277, -78.0000), lat = c(35.95013, 80.000)) + ncp$site_id <- c("3799900018810101", "3799900018819999") + ncp$time <- 2018L + ncpt <- + terra::vect(ncp, geom = c("lon", "lat"), + keepgeom = TRUE, crs = "EPSG:4326") + ncpt$time <- 2018L + path_tri <- testthat::test_path("..", "testdata", "tri") + + testthat::expect_no_error( + tri_r <- process_tri(path = path_tri, year = 2018) + ) + testthat::expect_s4_class(tri_r, "SpatVector") + + testthat::expect_no_error( + tri_c <- calc_tri( + from = tri_r, + locs = ncpt, + radius = c(1500L, 50000L) + ) + ) + testthat::expect_true(is.data.frame(tri_c)) + + # with geometry + testthat::expect_no_error( + tri_c_geom <- calc_tri( + from = tri_r, + locs = ncpt, + radius = c(1500L, 50000L), + geom = TRUE + ) + ) + testthat::expect_s4_class(tri_c_geom, "SpatVector") + + testthat::expect_no_error( + calc_tri( + from = tri_r, + locs = sf::st_as_sf(ncpt), + radius = 50000L + ) + ) + testthat::expect_error( + calc_tri( + from = tempdir(), + locs = ncpt, + radius = 50000L + ) + ) + testthat::expect_error( + calc_tri( + from = paste0(tdir, "/tri/"), + locs = ncpt[, 1:2], + radius = 50000L + ) + ) + testthat::expect_error( + calc_tri( + from = paste0(tdir, "/tri/"), + locs = ncpt, + radius = "As far as the Earth's radius" + ) + ) +})