From 619acb756f139c2e75f78cb6ac6f0d8db579e861 Mon Sep 17 00:00:00 2001 From: Jim Thorson <50178738+James-Thorson-NOAA@users.noreply.github.com> Date: Wed, 10 Jan 2024 11:14:06 -0800 Subject: [PATCH] Dev (#91) * change paste0(.) to file.path(.) in file naming for plots * another fix for `plot_factors` * fix bug in switchover from paste0 to file.path for plots * fix another bug in switchover from paste0 to file.path * fix marginaleffects * adding sample-based calculation of approx. sample-size * small expansion of docs * update paste0 to file.path * change default so fit_model runs in tempdir() ... ... to decrease change of user-error in re-using existing extrapolation-grid or Kmeans objects * separate plot_legend from plot_variable * adding new summary.fit_model option ... ... to output dataframe of expanded values * fix bug in amend_output when plotting EOF factors * small updates * switch from INLA to fmesher dependency * eliminate rgdal * bug fix for rotate_factors given sum-to-zero constraint * fix bug in updates for fmesher package * fix error from rnaturalearth using "sf" output * remove warning from inla.barrier.fem.copy * Update DESCRIPTION * run `document` --------- Co-authored-by: Jim Thorson --- DESCRIPTION | 10 +- NAMESPACE | 4 + R/FishStatsUtils.R | 1 + R/amend_output.R | 8 +- R/calculate_proportion.R | 52 +++++--- R/convert_shapefile.R | 4 +- R/deprecated.R | 6 +- R/fit_model.R | 76 ++++++----- R/inla.barrier.fem.copy.R | 123 ++++++++++++++++++ R/make_extrapolation_info.R | 4 +- R/make_kmeans.R | 10 +- R/make_mesh.R | 73 ++++++++--- R/make_settings.R | 33 ++++- R/make_spatial_info.R | 26 +++- R/marginaleffects.R | 8 +- R/plot_biomass_index.R | 6 +- R/plot_clusters.R | 4 +- R/plot_data.R | 15 ++- R/plot_factors.R | 4 +- R/plot_index.R | 4 +- R/plot_legend.R | 40 ++++++ R/plot_maps.r | 2 +- R/plot_range_edge.R | 30 ++--- R/plot_range_index.R | 12 +- R/plot_residual_semivariance.R | 4 +- R/plot_results.R | 18 ++- R/plot_similarity.R | 8 +- R/plot_variable.R | 42 +++--- R/rotate_factors.R | 7 +- ...means-100.RData => Kmeans_knots-100.RData} | Bin man/calculate_proportion.Rd | 32 +++-- man/fit_model.Rd | 8 +- man/make_extrapolation_info.Rd | 2 +- man/make_kmeans.Rd | 2 +- man/make_mesh.Rd | 1 + man/make_settings.Rd | 15 +-- man/make_spatial_info.Rd | 3 +- man/plot_biomass_index.Rd | 2 +- man/plot_clusters.Rd | 2 +- man/plot_data.Rd | 2 +- man/plot_factors.Rd | 2 +- man/plot_index.Rd | 2 +- man/plot_legend.Rd | 19 +++ man/plot_maps.Rd | 2 +- man/plot_range_edge.Rd | 2 +- man/plot_range_index.Rd | 11 +- man/plot_residual_semivariance.Rd | 2 +- man/plot_results.Rd | 2 +- man/plot_similarity.Rd | 2 +- man/plot_variable.Rd | 4 +- man/predict.fit_model.Rd | 6 +- man/summary.fit_model.Rd | 2 +- 52 files changed, 542 insertions(+), 217 deletions(-) create mode 100644 R/inla.barrier.fem.copy.R create mode 100644 R/plot_legend.R rename inst/extdata/EBS_pollock/{Kmeans-100.RData => Kmeans_knots-100.RData} (100%) create mode 100644 man/plot_legend.Rd diff --git a/DESCRIPTION b/DESCRIPTION index 6ca7a2a..aa41a84 100644 --- a/DESCRIPTION +++ b/DESCRIPTION @@ -1,8 +1,8 @@ Package: FishStatsUtils Type: Package Title: Utilities (shared code and data) for FishStats spatio-temporal modeling toolbox -Version: 2.12.1 -Date: 2022-11-18 +Version: 2.13.0 +Date: 2024-01-10 Authors@R: c(person(given = "James", family = "Thorson", @@ -21,13 +21,12 @@ Imports: DHARMa, ecodist, fastcluster, - INLA, + fmesher, plotrix, RANN, sf, raster, reshape2, - rgdal, rnaturalearth, rnaturalearthdata, seriation, @@ -45,7 
+44,8 @@ Depends: units, marginaleffects Enhances: - tidyr + tidyr, + INLA, Suggests: testthat Remotes: diff --git a/NAMESPACE b/NAMESPACE index 9ce4a67..cceb03c 100644 --- a/NAMESPACE +++ b/NAMESPACE @@ -60,6 +60,7 @@ export(plot_data) export(plot_dharma) export(plot_factors) export(plot_index) +export(plot_legend) export(plot_loadings) export(plot_maps) export(plot_overdispersion) @@ -76,3 +77,6 @@ export(rotate_factors) export(sample_variable) export(simulate_data) export(strip_units) +importFrom(fmesher,fm_evaluator) +importFrom(fmesher,fm_fem) +importFrom(fmesher,fm_mesh_2d) diff --git a/R/FishStatsUtils.R b/R/FishStatsUtils.R index a6e6d8b..e777c33 100644 --- a/R/FishStatsUtils.R +++ b/R/FishStatsUtils.R @@ -6,4 +6,5 @@ #' #' @docType package #' @name FishStatsUtils +#' @importFrom fmesher fm_mesh_2d fm_fem fm_evaluator NULL diff --git a/R/amend_output.R b/R/amend_output.R index b6ef242..d1cdc3f 100644 --- a/R/amend_output.R +++ b/R/amend_output.R @@ -91,15 +91,15 @@ function( fit = NULL, Num_ctm = abind::adrop(TmbData$Options_list$metadata_ctz[,,'num_notna',drop=FALSE], drop=3) %o% rep(1,TmbData$n_m) if( treat_missing_as_zero==TRUE ){ # if treat_missing_as_zero==TRUE, then switch density from year-categories with no data to zero - if("D_gct"%in%names(Report)) Report$D_gct = ifelse(Num_gct==0, 0, Report$D_gct) - if("D_gcy"%in%names(Report)) Report$D_gcy = ifelse(Num_gct==0, 0, Report$D_gcy) + if("D_gct"%in%names(Report) & all(dim(Report$D_gct)==dim(Num_gct))) Report$D_gct = ifelse(Num_gct==0, 0, Report$D_gct) + if("D_gcy"%in%names(Report) & all(dim(Report$D_gct)==dim(Num_gct))) Report$D_gcy = ifelse(Num_gct==0, 0, Report$D_gcy) if("Index_ctl"%in%names(Report)) Report$Index_ctl = ifelse(Num_ctl==0, 0, Report$Index_ctl) if("Index_cyl"%in%names(Report)) Report$Index_cyl = ifelse(Num_ctl==0, 0, Report$Index_cyl) }else{ # If some intercepts are mapped off, then switch density from year-categories with no data to NA if( any(is.na(Map$beta2_ft)) | any(is.na(Map$beta2_ft)) ){ - if("D_gct"%in%names(Report)) Report$D_gct = ifelse(Num_gct==0, NA, Report$D_gct) - if("D_gcy"%in%names(Report)) Report$D_gcy = ifelse(Num_gct==0, NA, Report$D_gcy) + if("D_gct"%in%names(Report) & all(dim(Report$D_gct)==dim(Num_gct))) Report$D_gct = ifelse(Num_gct==0, NA, Report$D_gct) + if("D_gcy"%in%names(Report) & all(dim(Report$D_gct)==dim(Num_gct))) Report$D_gcy = ifelse(Num_gct==0, NA, Report$D_gcy) if("Index_ctl"%in%names(Report)) Report$Index_ctl = ifelse(Num_ctl==0, NA, Report$Index_ctl) if("Index_cyl"%in%names(Report)) Report$Index_cyl = ifelse(Num_ctl==0, NA, Report$Index_cyl) } diff --git a/R/calculate_proportion.R b/R/calculate_proportion.R index 3249636..1ba4901 100644 --- a/R/calculate_proportion.R +++ b/R/calculate_proportion.R @@ -6,6 +6,12 @@ #' #' @inheritParams plot_biomass_index #' @inheritParams VAST::make_data +#' @param sample_size_method Method used to calculate the variance in proportions, which is then converted to an approximately equivalent multinomial sample size that can be used as input-sample-size in a subsequent stock assessment model. 
Options are: +#' \describe{ +#' \item{\code{sample_size_method="Taylor_series"}}{a Taylor-series approximation to the ratio of X/(X+Y) where X is the category-specific index and Y is the index for for all other categories} +#' \item{\code{sample_size_method="sample_based"}}{Taking samples from the joint precision of fixed and random effects, calculating proportions for each sample, and then computing the variance across those samples} +#' } +#' The sample-based approximation is expected to have higher variance and therefore lower approximate sample size. However, it may also have poor performance in cases when variance estimates are imprecise (such that the multivariate-normal approximation to joint precision is poor), and has not been thoroughly groundtested in real-world cases. #' @param Index output from \code{FishStatsUtils::plot_biomass_index} #' @param ... list of arguments to pass to \code{plot_index} #' @@ -18,15 +24,17 @@ #' @references For details regarding multivariate index standardization and expansion see \url{https://cdnsciencepub.com/doi/full/10.1139/cjfas-2018-0015} #' @export calculate_proportion <- -function( TmbData, +function( fit, + TmbData, Index, Expansion_cz = NULL, year_labels = NULL, years_to_plot = NULL, strata_names = NULL, category_names = NULL, + sample_size_method = c("Taylor_series","sample_based"), plot_legend = ifelse(TmbData$n_l>1,TRUE,FALSE), - DirName = paste0(getwd(),"/"), + DirName = getwd(), PlotName = "Proportion.png", PlotName2 = "Average.png", interval_width = 1, @@ -34,6 +42,7 @@ function( TmbData, height = 6, xlab = "Category", ylab = "Proportion", + n_samples = 250, ... ){ # Warnings and errors @@ -55,19 +64,32 @@ function( TmbData, Index_tl = apply(Index_ctl,MARGIN=2:3,FUN=sum) SE_Index_tl = sqrt(apply(SE_Index_ctl^2,MARGIN=2:3,FUN=sum,na.rm=TRUE)) - # Approximate variance for proportions, and effective sample size - Neff_ctl = var_Prop_ctl = array(NA,dim=dim(Prop_ctl)) - for( cI in 1:dim(var_Prop_ctl)[1]){ - for( tI in 1:dim(var_Prop_ctl)[2]){ - for( lI in 1:dim(var_Prop_ctl)[3]){ - # Original version - #var_Prop_ctl[cI,tI,lI] = Index_ctl[cI,tI,lI]^2/Index_tl[tI,lI]^2 * (SE_Index_ctl[cI,tI,lI]^2/Index_ctl[cI,tI,lI]^2 + SE_Index_tl[tI,lI]^2/Index_tl[tI,lI]^2 ) - # Slightly extended version - var_Prop_ctl[cI,tI,lI] = Index_ctl[cI,tI,lI]^2/Index_tl[tI,lI]^2 * (SE_Index_ctl[cI,tI,lI]^2/Index_ctl[cI,tI,lI]^2 - 2*SE_Index_ctl[cI,tI,lI]^2/(Index_ctl[cI,tI,lI]*Index_tl[tI,lI]) + SE_Index_tl[tI,lI]^2/Index_tl[tI,lI]^2 ) - var_Prop_ctl[cI,tI,lI] = ifelse( Index_ctl[cI,tI,lI]==0, 0, var_Prop_ctl[cI,tI,lI] ) # If dividing by zero, replace with 0 - # Covert to effective sample size - Neff_ctl[cI,tI,lI] = Prop_ctl[cI,tI,lI] * (1-Prop_ctl[cI,tI,lI]) / var_Prop_ctl[cI,tI,lI] - }}} + if( tolower(sample_size_method[1]) == "taylor_series" ){ + # Approximate variance for proportions, and effective sample size + Neff_ctl = var_Prop_ctl = array(NA,dim=dim(Prop_ctl)) + for( cI in 1:dim(var_Prop_ctl)[1]){ + for( tI in 1:dim(var_Prop_ctl)[2]){ + for( lI in 1:dim(var_Prop_ctl)[3]){ + # Original version + #var_Prop_ctl[cI,tI,lI] = Index_ctl[cI,tI,lI]^2/Index_tl[tI,lI]^2 * (SE_Index_ctl[cI,tI,lI]^2/Index_ctl[cI,tI,lI]^2 + SE_Index_tl[tI,lI]^2/Index_tl[tI,lI]^2 ) + # Slightly extended version + var_Prop_ctl[cI,tI,lI] = Index_ctl[cI,tI,lI]^2/Index_tl[tI,lI]^2 * (SE_Index_ctl[cI,tI,lI]^2/Index_ctl[cI,tI,lI]^2 - 2*SE_Index_ctl[cI,tI,lI]^2/(Index_ctl[cI,tI,lI]*Index_tl[tI,lI]) + SE_Index_tl[tI,lI]^2/Index_tl[tI,lI]^2 ) + var_Prop_ctl[cI,tI,lI] = ifelse( 
Index_ctl[cI,tI,lI]==0, 0, var_Prop_ctl[cI,tI,lI] ) # If dividing by zero, replace with 0 + # Covert to effective sample size + #Neff_ctl[cI,tI,lI] = Prop_ctl[cI,tI,lI] * (1-Prop_ctl[cI,tI,lI]) / var_Prop_ctl[cI,tI,lI] + }}} + }else{ + Index_ctlr = sample_variable( Sdreport = fit$parameter_estimates$SD, + Obj = fit$tmb_list$Obj, + variable_name = "Index_ctl", + n_samples = n_samples, + sample_fixed = TRUE ) + Prop_ctlr = Index_ctlr / outer( rep(1,dim(Index_ctlr)[1]), apply(Index_ctlr,MARGIN=2:4,FUN=sum) ) + var_Prop_ctl = apply(Prop_ctlr, MARGIN=1:3, FUN=var) + } + + # Covert to effective sample size + Neff_ctl = Prop_ctl * (1-Prop_ctl) / var_Prop_ctl # Median effective sample size across categories Neff_tl = apply(Neff_ctl, MARGIN=2:3, FUN=median, na.rm=TRUE) diff --git a/R/convert_shapefile.R b/R/convert_shapefile.R index 5b6e95f..0a1f1ac 100644 --- a/R/convert_shapefile.R +++ b/R/convert_shapefile.R @@ -30,7 +30,9 @@ convert_shapefile = function( file_path, area_tolerance = 0.05, ... ){ - shapefile_input = rgdal::readOGR( file_path, verbose=FALSE, p4s=projargs_for_shapefile ) + #shapefile_input = rgdal::readOGR( file_path, verbose=FALSE, p4s=projargs_for_shapefile ) + shapefile_input = sf::st_read( file_path ) + st_crs(shapefile_input) = st_crs(projargs_for_shapefile) # rgdal::writeOGR message("Reading shapefile with projargs: ", print(shapefile_input@proj4string)) # raster::shapefile(.) has simplified read-write interface for future reference diff --git a/R/deprecated.R b/R/deprecated.R index 7b2db26..32cc04a 100644 --- a/R/deprecated.R +++ b/R/deprecated.R @@ -1049,8 +1049,10 @@ function( Lon, Lat, crs=NA ){ # SEE: https://github.com/nwfsc-assess/geostatistical_delta-GLMM/issues/25#issuecomment-345825230 # Attach package - require(rgdal) - on.exit( detach("package:rgdal") ) + #require(rgdal) + #on.exit( detach("package:rgdal") ) + require(sp) + on.exit( detach("package:sp") ) # Transform dstart<-data.frame(lon=Lon, lat=Lat) # that's the object diff --git a/R/fit_model.R b/R/fit_model.R index 06708fe..dcecc2d 100644 --- a/R/fit_model.R +++ b/R/fit_model.R @@ -43,7 +43,7 @@ #' @param year_labels character vector specifying names for labeling times \code{t_i} #' @param ... additional arguments to pass to \code{\link{make_extrapolation_info}}, \code{\link{make_spatial_info}}, \code{\link[VAST]{make_data}}, \code{\link[VAST]{make_model}}, or \code{\link[TMBhelper]{fit_tmb}}, #' where arguments are matched by name against each function. If an argument doesn't match, it is still passed to \code{\link[VAST]{make_data}}. Note that \code{\link{make_spatial_info}} -#' passes named arguments to \code{\link[INLA]{inla.mesh.create}}. +#' passes named arguments to \code{\link[fmesher]{fm_mesh_2d}}. 
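As a minimal, self-contained sketch of the "Taylor_series" option documented above for calculate_proportion (not code from the patch, and all numbers hypothetical): the delta-method variance of each proportion followed by the Neff = p(1-p)/Var(p) conversion that the new code applies; the "sample_based" option instead computes var() across draws from sample_variable().

index_c   = c(10, 30, 60)            # hypothetical category-specific indices
se_c      = c(2, 4, 6)               # hypothetical standard errors for each index
index_tot = sum(index_c)             # total index across categories
se_tot2   = sum(se_c^2)              # variance of the total, matching SE_Index_tl^2 above
prop_c    = index_c / index_tot
# Delta-method ("Taylor_series") variance of each proportion, mirroring var_Prop_ctl above
var_prop_c = (index_c^2 / index_tot^2) *
             ( se_c^2 / index_c^2 - 2 * se_c^2 / (index_c * index_tot) + se_tot2 / index_tot^2 )
# Convert to an approximate multinomial sample size, mirroring Neff_ctl above
neff_c = prop_c * (1 - prop_c) / var_prop_c
round( neff_c, 1 )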
#' #' @return Object of class \code{fit_model}, containing formatted inputs and outputs from VAST #' \describe{ @@ -105,7 +105,7 @@ function( settings, a_i, c_iz = rep(0,length(b_i)), v_i = rep(0,length(b_i)), - working_dir = paste0(getwd(),"/"), + working_dir = tempdir(), X1config_cp = NULL, X2config_cp = NULL, covariate_data, @@ -180,8 +180,9 @@ function( settings, DirPath = working_dir, Save_Results = TRUE, fine_scale = settings$fine_scale, - knot_method = settings$knot_method) - spatial_args_input = combine_lists( input=extra_args, default=spatial_args_default, args_to_use=c(formalArgs(make_spatial_info),formalArgs(INLA::inla.mesh.create)) ) + knot_method = settings$knot_method, + mesh_package = settings$mesh_package ) + spatial_args_input = combine_lists( input=extra_args, default=spatial_args_default, args_to_use=c(formalArgs(make_spatial_info),formalArgs(fmesher::fm_mesh_2d)) ) spatial_list = do.call( what=make_spatial_info, args=spatial_args_input ) }else{ spatial_args_input = NULL @@ -507,7 +508,7 @@ plot.fit_model <- function(x, what="results", ...) #' @inheritParams DHARMa::plotResiduals #' #' @param x Output from \code{\link{fit_model}} -#' @param what String indicating what to summarize; options are `density` or `residuals` +#' @param what String indicating what to summarize; options are `density`, `index` or `residuals` #' @param n_samples Number of samples used when \code{what="residuals"} #' @param ... additional arguments passed to \code{\link[DHARMa]{plotResiduals}} when \code{what="residuals"} #' @@ -534,34 +535,41 @@ function( x, year_labels = year_labels, category_names = category_names ) - if( tolower(what) == "density" ){ + if( tolower(what) %in% c("density","index") ){ # Load location of extrapolation-grid ans[["extrapolation_grid"]] = print( x$extrapolation_list, quiet=TRUE ) # Load density estimates - if( any(c("D_gct","D_gcy") %in% names(x$Report)) ){ - if("D_gcy" %in% names(x$Report)) Dvar_name = "D_gcy" - if("D_gct" %in% names(x$Report)) Dvar_name = "D_gct" - ans[["Density_array"]] = x$Report[[Dvar_name]] - if( !( x$settings$fine_scale==TRUE | x$spatial_list$Method=="Stream_network" ) ){ - index_tmp = x$spatial_list$NN_Extrap$nn.idx[ which(x$extrapolation_list[["Area_km2_x"]]>0), 1 ] - ans[["Density_array"]] = ans[["Density_array"]][ index_tmp,,,drop=FALSE] - } - if( any(sapply(dimnames(ans[["Density_array"]]),FUN=is.null)) ){ - dimnames(ans[["Density_array"]]) = list( rownames(ans[["extrapolation_grid"]]), paste0("Category_",1:dim(ans[["Density_array"]])[[2]]), x$year_labels ) - } - # Expand as grid - Density_dataframe = expand.grid("Grid"=1:dim(ans[["Density_array"]])[[1]], "Category"=dimnames(ans[["Density_array"]])[[2]], "Year"=dimnames(ans[["Density_array"]])[[3]]) - Density_dataframe = cbind( Density_dataframe, ans[["extrapolation_grid"]][Density_dataframe[,'Grid'],], "Density"=as.vector(ans[["Density_array"]]) ) - ans[["Density_dataframe"]] = Density_dataframe - ans[['year_labels']] = x[['year_labels']] - rownames(Density_dataframe) = NULL - cat("\n### Printing head of and tail `Density_dataframe`, and returning data frame in output object\n") - print(head(Density_dataframe)) - print(tail(Density_dataframe)) - }else{ - stop( "`summary.fit_model` not implemented for the version of `VAST` being used" ) + if( tolower(what) == "density" ){ + ans[["Density_array"]] = x$Report[["D_gct"]] + }else if( tolower(what) == "index" ){ + ans[["Density_array"]] = x$Report[["Index_gctl"]] + } + + # Exclude boundary knots + if( !( x$settings$fine_scale==TRUE | 
x$spatial_list$Method=="Stream_network" ) ){ + index_tmp = x$spatial_list$NN_Extrap$nn.idx[ which(x$extrapolation_list[["Area_km2_x"]]>0), 1 ] + ans[["Density_array"]] = ans[["Density_array"]][ index_tmp,,,drop=FALSE] } + # Error check + if( any(sapply(dimnames(ans[["Density_array"]]),FUN=is.null)) ){ + stop("`summary.fit_model` assumes that arrays are labeled") + } + + # Expand as grid + Density_dataframe = expand.grid( dimnames(ans[["Density_array"]]) ) + Density_dataframe = cbind( Density_dataframe, ans[["extrapolation_grid"]][Density_dataframe[,'Site'],], as.vector(ans[["Density_array"]]) ) + colnames(Density_dataframe)[ncol(Density_dataframe)] = tolower(what) + + # Save output + ans[["Density_dataframe"]] = Density_dataframe + ans[['year_labels']] = x[['year_labels']] + rownames(Density_dataframe) = NULL + + # Print to terminal + cat("\n### Printing head of and tail `Density_dataframe`, and returning data frame in output object\n") + print(head(Density_dataframe)) + print(tail(Density_dataframe)) } # Residuals @@ -601,10 +609,10 @@ function( x, # Run DHARMa # Adding jitters because DHARMa version 0.3.2.0 sometimes still throws an error method="traditional" and integer=FALSE without jitters - dharmaRes = DHARMa::createDHARMa(simulatedResponse=strip_units(b_iz) + 1e-10*array(rnorm(prod(dim(b_iz))),dim=dim(b_iz)), - observedResponse=strip_units(x$data_list$b_i) + 1e-10*rnorm(length(x$data_list$b_i)), - fittedPredictedResponse=strip_units(x$Report$D_i), - integer=FALSE) + dharmaRes = DHARMa::createDHARMa( simulatedResponse = strip_units(b_iz) + 1e-10*array(rnorm(prod(dim(b_iz))),dim=dim(b_iz)), + observedResponse = strip_units(x$data_list$b_i) + 1e-10*rnorm(length(x$data_list$b_i)), + fittedPredictedResponse = strip_units(x$Report$D_i), + integer = FALSE) #dharmaRes = DHARMa::createDHARMa(simulatedResponse=strip_units(b_iz), # observedResponse=strip_units(x$data_list$b_i), # fittedPredictedResponse=strip_units(x$Report$D_i), @@ -660,7 +668,7 @@ function( x, if( is.null(working_dir) ){ plot_dharma(dharmaRes, ...) }else if(!is.na(working_dir) ){ - png(file=paste0(working_dir,"quantile_residuals.png"), width=8, height=4, res=200, units='in') + png(file=file.path(working_dir,"quantile_residuals.png"), width=8, height=4, res=200, units='in') plot_dharma(dharmaRes, form=form, ...) 
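A brief usage sketch of the expanded summary.fit_model behavior shown above; `fit` stands for a completed fit_model(.) object, so the calls are wrapped in if(FALSE) rather than run here.

if( FALSE ){
  out = summary( fit, what = "density" )
  head( out$Density_dataframe )                 # long format: one row per Site x Category x Time, plus grid columns and a `density` column
  out_index = summary( fit, what = "index" )    # same layout, built from Index_gctl and labeled `index`
}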
dev.off() } @@ -750,7 +758,7 @@ predict.fit_model <- function(x, new_covariate_data = NULL, new_catchability_data = NULL, do_checks = TRUE, - working_dir = paste0(getwd(),"/") ) + working_dir = getwd() ) { message("`predict.fit_model(.)` is in beta-testing, and please explore results carefully prior to using") diff --git a/R/inla.barrier.fem.copy.R b/R/inla.barrier.fem.copy.R new file mode 100644 index 0000000..61d68d8 --- /dev/null +++ b/R/inla.barrier.fem.copy.R @@ -0,0 +1,123 @@ +inla.barrier.fem.copy <- +function (mesh, barrier.triangles, Omega = NULL) +{ + stopifnot(inherits(mesh, "inla.mesh")) + if (missing(barrier.triangles) && is.null(Omega)) + stop("Input barrier triangles") + if (missing(barrier.triangles)) { + } + else { + barrier.triangles <- unique(barrier.triangles) + t <- length(mesh$graph$tv[, 1]) + remaining <- setdiff(1:t, barrier.triangles) + if (!is.null(Omega)) + warning("Omega is replaced by barrier.triangles") + Omega <- list(remaining, barrier.triangles) + } + dt.fem.white <- function(mesh, subdomain) { + Ck <- rep(0, mesh$n) + for (t in subdomain) { + px <- mesh$graph$tv[t, ] + temp <- mesh$loc[px, ] + p1 <- t(t(temp[1, c(1, 2)])) + p2 <- t(t(temp[2, c(1, 2)])) + p3 <- t(t(temp[3, c(1, 2)])) + Ts <- cbind(p2 - p1, p3 - p1) + area <- abs(det(Ts)) * 0.5 + for (i in 1:3) { + Ck[px[i]] <- Ck[px[i]] + area + } + } + return(Ck) + } + dt.fem.identity <- function(mesh) { + len <- length(mesh$graph$tv[, 1]) + index.i <- rep(0, len * 6) + index.j <- rep(0, len * 6) + Aij <- rep(0, len * 6) + counter <- 1 + for (t in 1:len) { + px <- mesh$graph$tv[t, ] + temp <- mesh$loc[px, ] + p1 <- t(t(temp[1, c(1, 2)])) + p2 <- t(t(temp[2, c(1, 2)])) + p3 <- t(t(temp[3, c(1, 2)])) + Ts <- cbind(p2 - p1, p3 - p1) + twiceArea <- abs(det(Ts)) + for (i in 1:3) { + index.i[counter] <- px[i] + index.j[counter] <- px[i] + Aij[counter] <- (twiceArea) * 1/12 + counter <- counter + 1 + } + for (i in 1:2) { + for (j in (i + 1):3) { + index.i[counter] <- px[i] + index.j[counter] <- px[j] + Aij[counter] <- (twiceArea) * 1/24 + counter <- counter + 1 + index.i[counter] <- px[j] + index.j[counter] <- px[i] + Aij[counter] <- (twiceArea) * 1/24 + counter <- counter + 1 + } + } + } + I <- Matrix::sparseMatrix(i = index.i, j = index.j, x = Aij, + dims = c(mesh$n, mesh$n), repr = "T") + return(I) + } + dt.fem.laplace <- function(mesh, subdomain) { + Nphix <- rbind(c(-1, -1), c(1, 0), c(0, 1)) + len <- length(subdomain) + index.i <- rep(0, len * 9) + index.j <- rep(0, len * 9) + Aij <- rep(0, len * 9) + counter <- 1 + for (tri in subdomain) { + px <- mesh$graph$tv[tri, ] + temp <- mesh$loc[px, ] + p1 <- t(t(temp[1, c(1, 2)])) + p2 <- t(t(temp[2, c(1, 2)])) + p3 <- t(t(temp[3, c(1, 2)])) + Ts <- cbind(p2 - p1, p3 - p1) + TTTinv <- solve(t(Ts) %*% Ts) + area <- abs(det(Ts)) * 0.5 + for (k in 1:3) { + for (m in 1:3) { + tmp <- (3 * m + k - 4) * length(subdomain) + index.i[(tmp + counter)] <- px[k] + index.j[(tmp + counter)] <- px[m] + Aij[(tmp + counter)] <- area * Nphix[k, c(1, + 2)] %*% TTTinv %*% as.matrix(Nphix[m, c(1, + 2)]) + } + } + counter <- counter + 1 + } + Dk <- Matrix::sparseMatrix(i = index.i, j = index.j, x = Aij, + dims = c(mesh$n, mesh$n), repr = "T") + return(Dk) + } + xi <- length(Omega) + #if (require(INLAspacetime)) { + # warning("Using implementation from the `INLAspacetime` package") + # fem <- INLAspacetime::mesh2fem.barrier(mesh = mesh, barrier.triangles = Omega[[2L]]) + #} + #else { + #warning(paste("Please install the `INLAspacetime` package\n", + # "which contains an implementation 
that runs faster!")) + fem <- list() + fem$I <- dt.fem.identity(mesh) + fem$D <- list() + fem$C <- list() + for (k in 1:xi) { + fem$D[[k]] <- dt.fem.laplace(mesh, Omega[[k]]) + } + for (k in 1:xi) { + fem$C[[k]] <- dt.fem.white(mesh, Omega[[k]]) + } + fem$hdim <- xi + #} + return(fem) +} diff --git a/R/make_extrapolation_info.R b/R/make_extrapolation_info.R index a4048eb..3700a57 100644 --- a/R/make_extrapolation_info.R +++ b/R/make_extrapolation_info.R @@ -111,7 +111,7 @@ make_extrapolation_info = function( Region, nstart = 100, area_tolerance = 0.05, backwards_compatible_kmeans = FALSE, - DirPath = paste0(getwd(),"/"), + DirPath = getwd(), ... ){ # Note: flip_around_dateline must appear in arguments for argument-matching in fit_model @@ -196,7 +196,7 @@ make_extrapolation_info = function( Region, "rockfish_recruitment_coastwide","rockfish_recruitment_core") if( toupper(Region[rI]) %in% toupper(Shapefile_set) ){ if( Region[rI]=="SP-ARSA" ) stop("There's some problem with `SP-ARSA` which precludes it's use") - Conversion = convert_shapefile( file_path=paste0(system.file("region_shapefiles",package="FishStatsUtils"),"/",toupper(Region[rI]),"/Shapefile.shp"), + Conversion = convert_shapefile( file_path=file.path(system.file("region_shapefiles",package="FishStatsUtils"), toupper(Region[rI]), "Shapefile.shp"), projargs_for_shapefile="+proj=longlat +ellps=WGS84 +no_defs", projargs=projargs, grid_dim_km=grid_dim_km, area_tolerance=area_tolerance, ... ) Extrapolation_List = list( "a_el"=matrix(Conversion$extrapolation_grid[,'Area_km2'],ncol=1), "Data_Extrap"=Conversion$extrapolation_grid, "zone"=NA, "projargs"=Conversion$projargs, "flip_around_dateline"=FALSE, "Area_km2_x"=Conversion$extrapolation_grid[,'Area_km2']) diff --git a/R/make_kmeans.R b/R/make_kmeans.R index 600b545..065f22b 100644 --- a/R/make_kmeans.R +++ b/R/make_kmeans.R @@ -28,7 +28,7 @@ function( n_x, nstart = 100, randomseed = 1, iter.max = 1000, - DirPath = paste0(getwd(),"/"), + DirPath = getwd(), Save_Results = TRUE, kmeans_purpose = "spatial", backwards_compatible_kmeans = FALSE ){ @@ -76,8 +76,8 @@ function( n_x, }else{ if(tmpfile %in% list.files(DirPath) ){ # If previously saved knots are available - load( file=paste0(DirPath,"/",tmpfile)) - message( "Loaded from ", DirPath, "/", tmpfile) + load( file=file.path(DirPath, tmpfile)) + message( "Loaded from ", file.path(DirPath, tmpfile) ) }else{ # Multiple runs to find optimal knots message("Using ", nstart, " iterations to find optimal ", @@ -97,8 +97,8 @@ function( n_x, } message("Iter=", nstart, ': Final=', round(Kmeans$tot.withinss,0), " after ", tries, " iterations") if(Save_Results==TRUE){ - save( Kmeans, file=paste0(DirPath, tmpfile)) - message( "Results saved to ", DirPath, tmpfile, "\n for subsequent runs by default (delete it to override)") + save( Kmeans, file=file.path(DirPath, tmpfile)) + message( "Results saved to ", file.path(DirPath, tmpfile), "\n for subsequent runs by default (delete it to override)") } } } diff --git a/R/make_mesh.R b/R/make_mesh.R index e99547f..d1184d4 100644 --- a/R/make_mesh.R +++ b/R/make_mesh.R @@ -20,24 +20,43 @@ function( loc_x, anisotropic_mesh = NULL, fine_scale = FALSE, map_data, + mesh_package = c("INLA","fmesher"), ...){ ####################### # Create the anisotropic SPDE mesh using 2D coordinates ####################### + mesh_package = match.arg( mesh_package ) # 2D coordinates SPDE if( is.null(anisotropic_mesh)){ if( fine_scale==FALSE ){ - anisotropic_mesh = INLA::inla.mesh.create( loc_x, plot.delay=NULL, ...) 
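For readers of inla.barrier.fem.copy above, a tiny standalone illustration of the per-triangle geometry it relies on: each triangle's area is half the absolute determinant of its two edge vectors, as in dt.fem.white and dt.fem.laplace. The vertices below are made up.

p1 = c(0, 0); p2 = c(1, 0); p3 = c(0, 2)   # hypothetical triangle vertices
Ts = cbind( p2 - p1, p3 - p1 )             # edge vectors as columns
area = abs( det(Ts) ) * 0.5                # triangle area (here, 1)
area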
+ if( mesh_package=="INLA" ){ + if(!require(INLA)) stop("Please install INLA or switch to `settings$mesh_package=`fmesher`") + anisotropic_mesh = INLA::inla.mesh.create( loc_x, plot.delay=NULL, ...) + }else{ + anisotropic_mesh = fm_mesh_2d( loc_x, ... ) + } }else{ - loc_z = rbind( loc_x, loc_g, loc_i ) - outer_hull = INLA::inla.nonconvex.hull( as.matrix(loc_z), convex = -0.05, concave = -0.05) - anisotropic_mesh = INLA::inla.mesh.create( loc_x, plot.delay=NULL, boundary=outer_hull, ...) + if( mesh_package=="INLA" ){ + loc_z = rbind( loc_x, loc_g, loc_i ) + if(!require(INLA)) stop("Please install INLA or switch to `settings$mesh_package=`fmesher`") + outer_hull = INLA::inla.nonconvex.hull( as.matrix(loc_z), convex = -0.05, concave = -0.05) + anisotropic_mesh = INLA::inla.mesh.create( loc_x, plot.delay=NULL, boundary=outer_hull, ...) + }else{ + #outer_hull = fm_nonconvex_hull( as.matrix(loc_z), convex = -0.05, concave = -0.05 ) + anisotropic_mesh = fm_mesh_2d( loc_x, ... ) # , boundary=outer_hull, ... ) + } } } - anisotropic_spde = INLA::inla.spde2.matern(anisotropic_mesh, alpha=2) + #anisotropic_spde = INLA::inla.spde2.matern(anisotropic_mesh, alpha=2) + anisotropic_spde = fm_fem( anisotropic_mesh, order=2 ) + anisotropic_spde$param.inla = list( "M0" = anisotropic_spde$c0, + "M1" = anisotropic_spde$g1, + "M2" = anisotropic_spde$g2) + anisotropic_spde$mesh = anisotropic_mesh + anisotropic_spde$n.spde = anisotropic_mesh$n # Pre-processing in R for anisotropy Dset = 1:2 @@ -54,8 +73,10 @@ function( loc_x, # Barriers don't affect projection matrix A # Obtain polygon for water if( missing(map_data) ){ - map_data = rnaturalearth::ne_countries( scale=switch("medium", "low"=110, "medium"=50, "high"=10, 50) ) - attr(map_data,"proj4string") = sp::CRS("+proj=longlat +datum=WGS84") + map_data = rnaturalearth::ne_countries( scale=50, returnclass="sf" ) + #map_data = sf::as_Spatial( map_data ) + #attr(map_data,"proj4string") = sp::CRS("+proj=longlat +datum=WGS84") + #proj4string(map_data) = sp::CRS("+proj=longlat +datum=WGS84") } # Calculate centroid of each triangle in mesh and convert to SpatialPoints @@ -65,12 +86,15 @@ function( loc_x, temp = anisotropic_mesh$loc[ anisotropic_mesh$graph$tv[tri_index,], ] posTri[tri_index,] = colMeans(temp)[c(1,2)] } - posTri = sp::SpatialPoints(posTri, proj4string=sp::CRS(Extrapolation_List$projargs) ) - posTri = sp::spTransform(posTri, CRSobj=map_data@proj4string ) + #posTri = sp::SpatialPoints(posTri, proj4string=sp::CRS(Extrapolation_List$projargs) ) + #posTri = sp::spTransform(posTri, CRSobj=sp::CRS(map_data) ) + posTri = sf::st_sfc( sf::st_multipoint(posTri), crs=sf::st_crs(Extrapolation_List$projargs) ) + posTri = sf::st_transform( posTri, crs=sf::st_crs(map_data) ) # Calculate set of triangles barrier.triangles with centroid over land if( Method == "Barrier" ){ - anisotropic_mesh_triangles_over_land = unlist(sp::over(map_data, posTri, returnList=TRUE)) + #anisotropic_mesh_triangles_over_land = unlist(sp::over(map_data, posTri, returnList=TRUE)) + anisotropic_mesh_triangles_over_land = unlist(sf::st_intersects(map_data, posTri)) }else{ anisotropic_mesh_triangles_over_land = vector() } @@ -80,7 +104,7 @@ function( loc_x, # Create Barrier object if requested # Don't do this unless necessary, because it sometimes throws an error #Diagnose issues: assign("anisotropic_mesh", anisotropic_mesh, envir = .GlobalEnv) - barrier_finite_elements = INLA:::inla.barrier.fem(mesh=anisotropic_mesh, + barrier_finite_elements = 
FishStatsUtils:::inla.barrier.fem.copy(mesh=anisotropic_mesh, barrier.triangles=anisotropic_mesh_triangles_over_land) barrier_list = list(C0 = barrier_finite_elements$C[[1]], C1 = barrier_finite_elements$C[[2]], @@ -104,19 +128,32 @@ function( loc_x, isotropic_mesh = anisotropic_mesh } if(Method %in% c("Spherical_mesh")){ - loc_isotropic_mesh = INLA::inla.mesh.map(loc_x, projection="longlat", inverse=TRUE) # Project from lat/long to mesh coordinates - isotropic_mesh = INLA::inla.mesh.create( loc_isotropic_mesh, plot.delay=NULL, ...) + #loc_isotropic_mesh = INLA::inla.mesh.map(loc_x, projection="longlat", inverse=TRUE) # Project from lat/long to mesh coordinates + #isotropic_mesh = INLA::inla.mesh.create( loc_isotropic_mesh, plot.delay=NULL, ...) + stop("Method `Spherical_mesh` no longer supported") } - isotropic_spde = INLA::inla.spde2.matern(isotropic_mesh, alpha=2) + #isotropic_spde = INLA::inla.spde2.matern(isotropic_mesh, alpha=2) + isotropic_spde = fm_fem(isotropic_mesh, order=2) + isotropic_spde$param.inla = list( "M0" = isotropic_spde$c0, + "M1" = isotropic_spde$g1, + "M2" = isotropic_spde$g2) + isotropic_spde$mesh = isotropic_mesh + isotropic_spde$n.spde = isotropic_mesh$n #################### # Return stuff #################### #if( isotropic_mesh$n != anisotropic_mesh$n ) stop("Check `Calc_Anisotropic_Mesh` for problem") - Return = list("loc_x"=loc_x, "loc_isotropic_mesh"=loc_isotropic_mesh, "isotropic_mesh"=isotropic_mesh, - "isotropic_spde"=isotropic_spde, "anisotropic_mesh"=anisotropic_mesh, "anisotropic_spde"=anisotropic_spde, - "Tri_Area"=Tri_Area, "TV"=TV, "E0"=E0, "E1"=E1, "E2"=E2, - "anisotropic_mesh_triangles_over_land"=anisotropic_mesh_triangles_over_land, "barrier_list"=barrier_list ) + Return = list( "loc_x"=loc_x, + "loc_isotropic_mesh"=loc_isotropic_mesh, + "isotropic_mesh"=isotropic_mesh, + "isotropic_spde"=isotropic_spde, + "anisotropic_mesh"=anisotropic_mesh, + "anisotropic_spde"=anisotropic_spde, + "Tri_Area"=Tri_Area, + "TV"=TV, "E0"=E0, "E1"=E1, "E2"=E2, + "anisotropic_mesh_triangles_over_land"=anisotropic_mesh_triangles_over_land, + "barrier_list"=barrier_list ) return(Return) } diff --git a/R/make_settings.R b/R/make_settings.R index 49b3193..23e8586 100644 --- a/R/make_settings.R +++ b/R/make_settings.R @@ -54,7 +54,8 @@ function( n_x, n_categories, VamConfig, max_cells, - knot_method ){ + knot_method, + mesh_package ){ # Get version if(missing(Version)) Version = FishStatsUtils::get_latest_version() @@ -83,6 +84,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "samples" if(missing(max_cells)) max_cells = Inf + if(missing(mesh_package)) mesh_package = "INLA" } # Current @@ -103,6 +105,26 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "grid" if(missing(max_cells)) max_cells = max( 2000, n_x*10 ) + if(missing(mesh_package)) mesh_package = "INLA" + } + if( tolower(purpose) == "index3" ){ + purpose_found = TRUE + if( convert_version_name(Version) >= convert_version_name("VAST_v7_0_0") ){ + if(missing(FieldConfig)) FieldConfig = matrix( "IID", ncol=2, nrow=3, dimnames=list(c("Omega","Epsilon","Beta"),c("Component_1","Component_2")) ) + }else{ + if(missing(FieldConfig)) FieldConfig = c("Omega1"="IID", "Epsilon1"="IID", "Omega2"="IID", "Epsilon2"="IID") + } + if(missing(RhoConfig)) RhoConfig = c("Beta1"=0, "Beta2"=0, "Epsilon1"=0, "Epsilon2"=0) + if(missing(VamConfig)) VamConfig = 
c("Method"=0, "Rank"=0, "Timing"=0) + if(missing(OverdispersionConfig)) OverdispersionConfig = c("Eta1"=0, "Eta2"=0) + if(missing(ObsModel)) ObsModel = c(2,1) + if(missing(bias.correct)) bias.correct = TRUE + if(missing(treat_nonencounter_as_zero)) treat_nonencounter_as_zero = TRUE + if(missing(Options)) Options = c( "Calculate_Range"=TRUE, "Calculate_effective_area"=TRUE, "treat_nonencounter_as_zero"=treat_nonencounter_as_zero ) + if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) + if(missing(knot_method)) knot_method = "grid" + if(missing(max_cells)) max_cells = max( 2000, n_x*10 ) + if(missing(mesh_package)) mesh_package = "fmesher" } ################### @@ -126,6 +148,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "samples" if(missing(max_cells)) max_cells = Inf + if(missing(mesh_package)) mesh_package = "INLA" } ################### @@ -149,6 +172,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Bratio_cyl", "Index_ctl", "Bratio_ctl" ) if(missing(knot_method)) knot_method = "samples" if(missing(max_cells)) max_cells = Inf + if(missing(mesh_package)) mesh_package = "INLA" } ################### @@ -171,6 +195,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "samples" if(missing(max_cells)) max_cells = Inf + if(missing(mesh_package)) mesh_package = "INLA" } ################### @@ -195,6 +220,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "samples" if(missing(max_cells)) max_cells = Inf + if(missing(mesh_package)) mesh_package = "INLA" } if( tolower(purpose) %in% c("eof2") ){ @@ -213,6 +239,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "grid" if(missing(max_cells)) max_cells = max( 2000, n_x*10 ) + if(missing(mesh_package)) mesh_package = "INLA" } if( tolower(purpose) %in% c("eof3") ){ @@ -230,6 +257,7 @@ function( n_x, if(missing(vars_to_correct)) vars_to_correct = c( "Index_cyl", "Index_ctl" ) if(missing(knot_method)) knot_method = "grid" if(missing(max_cells)) max_cells = max( 2000, n_x*10 ) + if(missing(mesh_package)) mesh_package = "INLA" } ################### @@ -268,6 +296,7 @@ function( n_x, "Method" = Method, "use_anisotropy" = use_anisotropy, "fine_scale" = fine_scale, - "bias.correct" = bias.correct ) + "bias.correct" = bias.correct, + "mesh_package" = mesh_package ) return(settings) } diff --git a/R/make_spatial_info.R b/R/make_spatial_info.R index eaefc05..6450dc2 100644 --- a/R/make_spatial_info.R +++ b/R/make_spatial_info.R @@ -68,11 +68,12 @@ make_spatial_info = function( n_x, iter.max = 1000, randomseed = 1, nstart = 100, - DirPath = paste0(getwd(),"/"), + DirPath = getwd(), Save_Results = FALSE, LON_intensity, LAT_intensity, backwards_compatible_kmeans = FALSE, + mesh_package = "INLA", ... ){ # Deprecated options @@ -107,8 +108,14 @@ make_spatial_info = function( n_x, Grid_bounds = (grid_size_km/110) * apply(loc_e/(grid_size_km/110), MARGIN=2, FUN=function(vec){trunc(range(vec))+c(0,1)}) # Calculate k-means centroids - if(is.null(Kmeans)) Kmeans = make_kmeans(n_x=n_x, - loc_orig=loc_i[,c("Lon", "Lat")], randomseed=randomseed, kmeans_purpose='spatial', backwards_compatible_kmeans=backwards_compatible_kmeans, ... 
) + if(is.null(Kmeans)){ + Kmeans = make_kmeans( n_x = n_x, + loc_orig = loc_i[,c("Lon", "Lat")], + randomseed = randomseed, + kmeans_purpose = 'spatial', + backwards_compatible_kmeans = backwards_compatible_kmeans, + ... ) + } # Calculate grid for 2D AR1 process loc_grid = expand.grid( 'Lon'=seq(Grid_bounds[1,1],Grid_bounds[2,1],by=grid_size_LL), 'Lat'=seq(Grid_bounds[1,2],Grid_bounds[2,2],by=grid_size_LL) ) @@ -146,6 +153,9 @@ make_spatial_info = function( n_x, } if( Method == "Stream_network" ){ knot_i = Extrapolation_List$Data_Extrap[,"child_i"] + if( length(knot_i) != nrow(loc_i) ){ + stop("Check `input_grid` input") + } loc_x = project_coordinates( X=Network_sz_LL[,"Lon"], Y=Network_sz_LL[,"Lat"], projargs=Extrapolation_List$projargs ) colnames(loc_x) = c('E_km', 'N_km') } @@ -175,10 +185,10 @@ make_spatial_info = function( n_x, # Diagnose issues: assign("Kmeans", Kmeans, envir = .GlobalEnv) if(Method != "Stream_network"){ MeshList = make_mesh( Method=Method, loc_x=Kmeans$centers, loc_g=loc_g, loc_i=loc_i, Extrapolation_List=Extrapolation_List, - fine_scale=fine_scale, anisotropic_mesh=anisotropic_mesh, ... ) + fine_scale=fine_scale, anisotropic_mesh=anisotropic_mesh, mesh_package=mesh_package, ... ) }else{ MeshList = make_mesh( Method=Method, loc_x=loc_x, loc_g=loc_g, loc_i=loc_i, Extrapolation_List=Extrapolation_List, - fine_scale=fine_scale, anisotropic_mesh=anisotropic_mesh, ... ) + fine_scale=fine_scale, anisotropic_mesh=anisotropic_mesh, mesh_package=mesh_package, ... ) } # Deal with loc_s and latlon_s @@ -226,9 +236,11 @@ make_spatial_info = function( n_x, A_gs = as( diag(n_x), "dgTMatrix" ) } if( fine_scale==TRUE ){ - A_is = INLA::inla.spde.make.A( MeshList$anisotropic_mesh, loc=as.matrix(loc_i) ) + #A_is = INLA::inla.spde.make.A( MeshList$anisotropic_mesh, loc=as.matrix(loc_i) ) + A_is = fm_evaluator( MeshList$anisotropic_mesh, loc=as.matrix(loc_i) )$proj$A if( class(A_is)=="dgCMatrix" ) A_is = as( A_is, "dgTMatrix" ) - A_gs = INLA::inla.spde.make.A( MeshList$anisotropic_mesh, loc=as.matrix(loc_g) ) + #A_gs = INLA::inla.spde.make.A( MeshList$anisotropic_mesh, loc=as.matrix(loc_g) ) + A_gs = fm_evaluator( MeshList$anisotropic_mesh, loc=as.matrix(loc_g) )$proj$A if( class(A_gs)=="dgCMatrix" ) A_gs = as( A_gs, "dgTMatrix" ) Check_i = apply( A_is, MARGIN=1, FUN=function(vec){sum(vec>0)}) Check_g = apply( A_is, MARGIN=1, FUN=function(vec){sum(vec>0)}) diff --git a/R/marginaleffects.R b/R/marginaleffects.R index e27709d..c30514b 100644 --- a/R/marginaleffects.R +++ b/R/marginaleffects.R @@ -90,11 +90,13 @@ get_predict.fit_model = function(model, newdata, covariate, center=FALSE, ...){ # Return if( covariate %in% c("X1","X2") ){ - out = expand.grid( rowid=seq_along(yhat_ic[,1]), category=model$category_names ) - out$predicted = as.vector(yhat_ic) + out = expand.grid( rowid=seq_along(yhat_ic[,1]), + category=model$category_names ) + out$estimate = as.vector(yhat_ic) } if( covariate %in% c("Q1","Q2") ){ - out = data.frame( rowid=seq_along(yhat_ic[,1]), predicted=yhat_ic[,1] ) + out = data.frame( rowid=seq_along(yhat_ic[,1]), + estimate=yhat_ic[,1] ) } return(out) } diff --git a/R/plot_biomass_index.R b/R/plot_biomass_index.R index 8d16570..dbc44c7 100644 --- a/R/plot_biomass_index.R +++ b/R/plot_biomass_index.R @@ -29,7 +29,7 @@ #' @export plot_biomass_index <- function( fit, - DirName = paste0(getwd(),"/"), + DirName = getwd(), PlotName = "Index", interval_width = 1, years_to_plot = NULL, @@ -195,7 +195,7 @@ function( fit, if( all(c("Fratio_ct","Bratio_ctl") %in% names(par_hat)) ){ 
Par = list( mar=c(2,2,1,0), mgp=c(2,0.5,0), tck=-0.02, yaxs="i", oma=c(1,2,0,0), mfrow=mfrow, ... ) Col = colorRampPalette(colors=c("blue","purple","red")) - png( file=paste0(DirName,"/",PlotName,"-Status.png"), width=width, height=height, res=200, units="in") + png( file=file.path(DirName,paste0(PlotName,"-Status.png")), width=width, height=height, res=200, units="in") par( Par ) Array1_ct = abind::abind( "Estimate"=matrix(par_hat[["Bratio_ctl"]][,,1],nrow=TmbData$n_c,dimnames=dimnames(par_hat[["Bratio_ctl"]])[1:2]), "Std. Error"=matrix(par_SE[["Bratio_ctl"]][,,1],nrow=TmbData$n_c,dimnames=dimnames(par_hat[["Bratio_ctl"]])[1:2]), along=3 ) Array1_ct = ifelse( Array1_ct==0, NA, Array1_ct ) @@ -226,7 +226,7 @@ function( fit, "Estimate" = as.vector(par_hat[[index_name]]), "Std. Error for Estimate" = as.vector(par_SE[[index_name]]), "Std. Error for ln(Estimate)" = as.vector(par_SE[[log_index_name]]) ) - write.csv( Table, file=paste0(DirName,"/Index.csv"), row.names=FALSE) + write.csv( Table, file=file.path(DirName,"Index.csv"), row.names=FALSE) # Return stuff # Necessary to provide "log_Index_ctl" and "Index_ctl" for use in calculate_proportion, which has been fixed for zeros here diff --git a/R/plot_clusters.R b/R/plot_clusters.R index 49e62dd..dbdf239 100644 --- a/R/plot_clusters.R +++ b/R/plot_clusters.R @@ -34,7 +34,7 @@ function( fit, year_labels = fit$year_labels, category_names = fit$category_names, map_list = NULL, - working_dir = paste0(getwd(),"/"), + working_dir = getwd(), file_name = paste0("Class-",var_name), file_name2 = paste0("Class-",var_name,"-averages"), replace_Inf_with_NA = TRUE, @@ -127,7 +127,7 @@ function( fit, Ybar_kc = apply( Y_zc, MARGIN=2, FUN=function(y_z,class_z){tapply(y_z,INDEX=class_z,FUN=mean)}, class_z=Class_z ) ThorsonUtilities::save_fig if(yaxis_log==TRUE){f=exp}else{f=identity} - png( file=paste0(working_dir,file_name2,".png"), width=6, height=6, units="in", res=200 ) + png( file=file.path(working_dir,paste0(file_name2,".png")), width=6, height=6, units="in", res=200 ) par( mar=c(7,3,1,1), mgp=c(2,0.5,0), tck=-0.02 ) matplot( y = f(t(Ybar_kc)), #x = factor(colnames(Ybar_kc)), diff --git a/R/plot_data.R b/R/plot_data.R index b13620c..f23df60 100644 --- a/R/plot_data.R +++ b/R/plot_data.R @@ -23,7 +23,7 @@ function( Extrapolation_List, Lat_i = Data_Geostat[,'Lat'], Lon_i = Data_Geostat[,'Lon'], Year_i = Data_Geostat[,'Year'], - PlotDir = paste0(getwd(),"/"), + PlotDir = getwd(), Plot1_name = "Data_and_knots.png", Plot2_name = "Data_by_year.png", col = "red", @@ -65,12 +65,15 @@ function( Extrapolation_List, CRS_proj = sp::CRS( projargs ) # Data for mapping - map_data = rnaturalearth::ne_countries(scale=switch(map_resolution, "low"=110, "medium"=50, "high"=10, 50), country=country) + map_data = rnaturalearth::ne_countries( scale=switch(map_resolution, "low"=110, "medium"=50, "high"=10, 50), + country=country, returnclass="sf" ) # Fix warning messages from projecting rnaturalearth object # Solution: Recreate SpatialPolygonsDataFrame from output - map_data = sp::SpatialPolygonsDataFrame( Sr=sp::SpatialPolygons(slot(map_data,"polygons"),proj4string=CRS_orig), data=slot(map_data,"data") ) + #map_data = sp::SpatialPolygonsDataFrame( Sr=sp::SpatialPolygons(slot(map_data,"polygons"),proj4string=CRS_orig), data=slot(map_data,"data") ) # comment(slot(map_data, "proj4string")) = comment(sp::CRS("+proj=longlat")) - map_proj = sp::spTransform(map_data, CRSobj=CRS_proj) + #map_proj = sp::spTransform(map_data, CRSobj=CRS_proj) + map_proj = sf::st_transform(map_data, 
crs=sf::st_crs(CRS_proj) ) + map_proj = sf::as_Spatial( map_proj ) # project Lat_i/Lon_i sample_data = sp::SpatialPoints( coords=cbind(Lon_i,Lat_i), proj4string=CRS_orig ) @@ -83,7 +86,7 @@ function( Extrapolation_List, grid_data = sp::SpatialPoints( coords=Extrapolation_List$Data_Extrap[which_rows,c('Lon','Lat')], proj4string=CRS_orig ) grid_proj = sp::spTransform( grid_data, CRSobj=CRS_proj) - png( file=paste0(PlotDir,Plot1_name), width=6, height=6, res=200, units="in") + png( file=file.path(PlotDir,Plot1_name), width=6, height=6, res=200, units="in") par( mfrow=c(2,2), mar=c(3,3,2,0), mgp=c(1.75,0.25,0) ) plot( grid_data@coords, cex=0.01, main="Extrapolation (Lat-Lon)" ) sp::plot( map_data, col=land_color, add=TRUE ) @@ -100,7 +103,7 @@ function( Extrapolation_List, if( !any(unique(Year_i) %in% year_labels) ) year_labels = sort(unique(Year_i)) Nrow = ceiling( sqrt(length(year_labels)) ) Ncol = ceiling( length(year_labels)/Nrow ) - if(!is.null(Plot2_name)) png( file=paste0(PlotDir,Plot2_name), width=Ncol*2, height=Nrow*2, res=200, units="in") + if(!is.null(Plot2_name)) png( file=file.path(PlotDir,Plot2_name), width=Ncol*2, height=Nrow*2, res=200, units="in") par( mfrow=c(Nrow,Ncol), mar=c(0,0,2,0), mgp=c(1.75,0.25,0), oma=c(4,4,0,0) ) for( t in 1:length(year_labels) ){ plot( 1, type="n", xlim=range(sample_proj@coords[,1]), ylim=range(sample_proj@coords[,2]), main=year_labels[t], xaxt="n", yaxt="n" ) diff --git a/R/plot_factors.R b/R/plot_factors.R index 71f56ea..1d106fd 100644 --- a/R/plot_factors.R +++ b/R/plot_factors.R @@ -293,6 +293,7 @@ function( fit, }else{ factor_names = paste0("Factor_",1:dim(Var_rot$Psi_rot)[2]) } + tmp_names = paste0("Factor_",1:dim(Var_rot$Psi_rot)[3]) plot_maps( plot_set=c(6,6,NA,6,7,7,NA,7)[i], fit = fit, Report = Report2_tmp, @@ -300,6 +301,7 @@ function( fit, MapSizeRatio = mapdetails_list[["MapSizeRatio"]], working_dir = plotdir, category_names = factor_names, + #year_labels = tmp_names, Panel = "Year", legend_x = mapdetails_list[["Legend"]]$x/100, legend_y = mapdetails_list[["Legend"]]$y/100, @@ -318,7 +320,7 @@ function( fit, # Return stuff invisibly names(Hinv_list) = names(Psi2prime_list) = names(Psiprime_list) = names(Lprime_SE_list) = names(Lprime_list) = names(L_list) = c("Omega1", "Epsilon1", "Beta1", "EpsilonTime1", "Omega2", "Epsilon2", "Beta2", "EpsilonTime2") Return = list("Loadings"=L_list, "Rotated_loadings"=Lprime_list, "Rotated_factors"=Psiprime_list, "Rotated_projected_factors"=Psi2prime_list, "Rotation_matrices"=Hinv_list) - if( !missing(Obj) && class(SD)=="sdreport" ){ + if( !is.null(Obj) && class(SD)=="sdreport" ){ Return[["Rotated_loadings_SE"]] = Lprime_SE_list Return[["Rotated_projected_factors_SE"]] = Psi2prime_SE_list } diff --git a/R/plot_index.R b/R/plot_index.R index ac87bf1..68e9ca8 100644 --- a/R/plot_index.R +++ b/R/plot_index.R @@ -26,7 +26,7 @@ function( Index_ctl, category_names = NULL, scale = "uniform", plot_legend = NULL, - DirName = paste0(getwd(),"/"), + DirName = getwd(), PlotName = "Index.png", interval_width = 1, width = NULL, @@ -126,7 +126,7 @@ function( Index_ctl, # Plot Par = combine_lists( default=list(mar=c(2,2,1,0),mgp=c(2,0.5,0),tck=-0.02,yaxs="i",oma=c(2,2,0,0),mfrow=mfrow), input=list(...) 
) if(!is.na(PlotName)){ - png( file=paste0(DirName,PlotName), width=width, height=height, res=200, units="in") # paste0(DirName,ifelse(DirName=="","","/"),PlotName) + png( file=file.path(DirName,PlotName), width=width, height=height, res=200, units="in") # paste0(DirName,ifelse(DirName=="","","/"),PlotName) on.exit( dev.off() ) } if(add==FALSE) par( Par ) diff --git a/R/plot_legend.R b/R/plot_legend.R new file mode 100644 index 0000000..145d048 --- /dev/null +++ b/R/plot_legend.R @@ -0,0 +1,40 @@ +#' @title +#' Plot standard maps +#' + +#' @export +plot_legend <- +function( Zlim, + legend_x = c(0,0.05), + legend_y = c(0.05,0.45), + cex.legend = 1, + col = viridisLite::viridis, + legend_digits = 1, + f = identity ){ + + # Boundaries + xl = (1-legend_x[1])*par('usr')[1] + (legend_x[1])*par('usr')[2] + xr = (1-legend_x[2])*par('usr')[1] + (legend_x[2])*par('usr')[2] + yb = (1-legend_y[1])*par('usr')[3] + (legend_y[1])*par('usr')[4] + yt = (1-legend_y[2])*par('usr')[3] + (legend_y[2])*par('usr')[4] + + # Logic + if( diff(legend_y) > diff(legend_x) ){ + align = c("lt","rb")[2] + gradient = c("x","y")[2] + }else{ + align = c("lt","rb")[1] + gradient = c("x","y")[1] + } + + # Make plot + plotrix::color.legend( xl=xl, + yb=yb, + xr=xr, + yt=yt, + legend = round(f(seq(Zlim[1],Zlim[2],length=4)),legend_digits), + rect.col=col(1000), + cex=cex.legend, + align=align, + gradient=gradient) +} diff --git a/R/plot_maps.r b/R/plot_maps.r index fd9c36f..d2910c5 100644 --- a/R/plot_maps.r +++ b/R/plot_maps.r @@ -53,7 +53,7 @@ function( plot_set = 3, years_to_plot = NULL, category_names = NULL, quiet = FALSE, - working_dir = paste0(getwd(),"/"), + working_dir = getwd(), MapSizeRatio = NULL, n_cells, plot_value = "estimate", diff --git a/R/plot_range_edge.R b/R/plot_range_edge.R index 8066b26..2feb85f 100644 --- a/R/plot_range_edge.R +++ b/R/plot_range_edge.R @@ -25,7 +25,7 @@ function( Sdreport, years_to_plot = NULL, strata_names = NULL, category_names = NULL, - working_dir = paste0(getwd(),"/"), + working_dir = getwd(), quantiles = c(0.05,0.95), n_samples = 100, interval_width = 1, @@ -125,20 +125,20 @@ function( Sdreport, for( mI in 1:dim(Edge_zctm)[4] ){ Index_zct = array(Edge_zctm[,,,mI,'Estimate'],dim(Edge_zctm)[1:3]) sd_Index_zct = array(Edge_zctm[,,,mI,'Std. 
Error'],dim(Edge_zctm)[1:3]) - plot_index( Index_ctl=aperm(Index_zct,c(2,3,1)), - sd_Index_ctl=aperm(sd_Index_zct,c(2,3,1)), - year_labels=year_labels, - years_to_plot=years_to_plot, - strata_names=quantiles, - category_names=category_names, - DirName=working_dir, - PlotName=paste0("RangeEdge_",m_labels[mI],".png"), - Yrange=c(NA,NA), - interval_width=interval_width, - width=width, - height=height, - xlab="Year", - ylab=paste0("Quantiles (",m_labels[mI],")") ) + plot_index( Index_ctl = aperm(Index_zct,c(2,3,1)), + sd_Index_ctl = aperm(sd_Index_zct,c(2,3,1)), + year_labels = year_labels, + years_to_plot = years_to_plot, + strata_names = quantiles, + category_names = category_names, + DirName = working_dir, + PlotName = paste0("RangeEdge_",m_labels[mI],".png"), + Yrange = c(NA,NA), + interval_width = interval_width, + width = width, + height = height, + xlab = "Year", + ylab = paste0("Quantiles (",m_labels[mI],")") ) } # Return list of stuff diff --git a/R/plot_range_index.R b/R/plot_range_index.R index 4d266a0..d8f1361 100644 --- a/R/plot_range_index.R +++ b/R/plot_range_index.R @@ -32,10 +32,10 @@ function( Sdreport, year_labels = NULL, years_to_plot = NULL, strata_names = NULL, - PlotDir = paste0(getwd(),"/"), - FileName_COG = paste0(PlotDir,"/center_of_gravity.png"), - FileName_Area = paste0(PlotDir,"/Area.png"), - FileName_EffArea = paste0(PlotDir,"/Effective_Area.png"), + PlotDir = getwd(), + FileName_COG = "center_of_gravity.png", + #FileName_Area = file.path(PlotDir,"Area.png"), + FileName_EffArea = "Effective_Area.png", Znames = rep("",ncol(TmbData$Z_gm)), use_biascorr = TRUE, category_names = NULL, @@ -97,7 +97,7 @@ function( Sdreport, # Plot center of gravity # Can't use `FishStatsUtils::plot_index` because of 2-column format - png( file=FileName_COG, width=6.5, height=TmbData$n_c*2, res=200, units="in") + png( file=file.path(PlotDir,"center_of_gravity.png"), width=6.5, height=TmbData$n_c*2, res=200, units="in") par( mar=c(2,2,1,0), mgp=c(1.75,0.25,0), tck=-0.02, oma=c(1,1,0,1.5), mfrow=c(TmbData$n_c,dim(SD_mean_Z_ctm)[[3]]), ... 
) # for( cI in 1:TmbData$n_c ){ for( mI in 1:dim(SD_mean_Z_ctm)[[3]]){ @@ -195,7 +195,7 @@ function( Sdreport, years_to_plot = years_to_plot, strata_names = strata_names, category_names = category_names, - DirName = "", + DirName = PlotDir, PlotName = FileName_EffArea, scale = "log", interval_width = interval_width, diff --git a/R/plot_residual_semivariance.R b/R/plot_residual_semivariance.R index 6e99d8e..6db06d5 100644 --- a/R/plot_residual_semivariance.R +++ b/R/plot_residual_semivariance.R @@ -13,7 +13,7 @@ function( fit, dharma_raster, dharmaRes, file_name = "quantile_residuals_semivariance", - working_dir = paste0(getwd(),"/") ){ + working_dir = getwd() ){ # Transform tmp = dharma_raster$Raster_proj @@ -33,7 +33,7 @@ function( fit, residual_semivariance = gstat::variogram(values~1, stfdf, width=20, cutoff = 500, tlags=0:min(7,length(time)-1) ) # Plot - ThorsonUtilities::save_fig( file=paste0(working_dir,file_name), width=6, height=5 ) + ThorsonUtilities::save_fig( file=file.path(working_dir,file_name), width=6, height=5 ) # gstat:::plot.gstatVariogram # gstat:::plot.StVariogram semivariance_plot = plot( residual_semivariance, diff --git a/R/plot_results.R b/R/plot_results.R index c732ca6..9a29932 100644 --- a/R/plot_results.R +++ b/R/plot_results.R @@ -45,7 +45,7 @@ plot_results <- function( fit, settings = fit$settings, plot_set = 3, - working_dir = paste0(getwd(),"/"), + working_dir = getwd(), year_labels = fit$year_labels, years_to_plot = fit$years_to_plot, category_names = fit$category_names, @@ -99,8 +99,8 @@ function( fit, # Plot anisotropy message("\n### Making plot of anisotropy") - plot_anisotropy( FileName=paste0(working_dir,"Aniso.png"), - Obj=fit$tmb_list$Obj ) + plot_anisotropy( FileName = file.path(working_dir,"Aniso.png"), + Obj = fit$tmb_list$Obj ) # Plot index plot_biomass_index_args = list(...) @@ -139,13 +139,15 @@ function( fit, #if( !all(is.numeric(year_labels)) ) stop("`plot_biomass_index` isn't built to handle non-numeric `year_labels`") calculate_proportion_args = list(...) calculate_proportion_args = combine_lists( "input"=calculate_proportion_args, "args_to_use"=formalArgs(calculate_proportion), - "default" = list( TmbData = fit$data_list, + "default" = list( fit = fit, + TmbData = fit$data_list, Index = Index, year_labels = year_labels, years_to_plot = years_to_plot, use_biascorr = use_biascorr, category_names = category_names, - DirName = working_dir ) ) + DirName = working_dir, + n_samples = n_samples ) ) Proportions = do.call( what=calculate_proportion, args=calculate_proportion_args ) #Compositions = plot_biomass_index( DirName=working_dir, TmbData=fit$data_list, Sdreport=fit$parameter_estimates$SD, year_labels=year_labels, # years_to_plot=years_to_plot, use_biascorr=use_biascorr, category_names=category_names ) @@ -259,7 +261,11 @@ function( fit, # Plotting quantile residuals message("\n### Making quantile residuals using conditional simulation and package DHARMa") - dharmaRes = summary( fit, what="residuals", working_dir=working_dir, type=type, ... ) + dharmaRes = summary( fit, + what = "residuals", + working_dir = working_dir, + type = type, + ... 
) # Mapping quantile residuals message("\n### Plotting quantile residuals ") diff --git a/R/plot_similarity.R b/R/plot_similarity.R index 8467b7c..77cf734 100644 --- a/R/plot_similarity.R +++ b/R/plot_similarity.R @@ -14,7 +14,7 @@ function( fit, year_labels = fit$year_labels, category_names = fit$category_names, similarity_metric = c("hclust", "Correlation", "Dissimilarity", "Covariance")[1], - working_dir = paste0(getwd(),"/"), + working_dir = getwd(), file_name = similarity_metric, panel_size = 3, Res = 200, @@ -30,19 +30,19 @@ function( fit, if( fit$data_list$n_c >= 2 ){ # if(Format=="png"){ - png(file=paste0(working_dir,file_name,".png"), + png(file=file.path(working_dir,paste0(file_name,".png")), width=2 * panel_size, height=4 * panel_size, res=Res, units='in') on.exit( dev.off() ) } if(Format=="jpg"){ - jpeg(file=paste0(working_dir,file_name,".jpg"), + jpeg(file=file.path(working_dir,paste0(file_name,".jpg")), width=2 * panel_size, height=4 * panel_size, res=Res, units='in') on.exit( dev.off() ) } if(Format%in%c("tif","tiff")){ - tiff(file=paste0(working_dir,file_name,".tif"), + tiff(file=file.path(working_dir,paste0(file_name,".tif")), width = 2 * panel_size, height = 4 * panel_size, res=Res, units='in') on.exit( dev.off() ) diff --git a/R/plot_variable.R b/R/plot_variable.R index 7fa4bf9..b9bd299 100644 --- a/R/plot_variable.R +++ b/R/plot_variable.R @@ -52,7 +52,7 @@ function( Y_gt, projargs = '+proj=longlat', map_resolution = "medium", file_name = "density", - working_dir = paste0(getwd(),"/"), + working_dir = getwd(), Format = "png", Res = 200, add = FALSE, @@ -132,12 +132,15 @@ function( Y_gt, # Data for mapping #map_data = rnaturalearth::ne_coastline(scale=switch(map_resolution, "low"=110, "medium"=50, "high"=10, 50), continent="america") - map_data = rnaturalearth::ne_countries(scale=switch(map_resolution, "low"=110, "medium"=50, "high"=10, 50), country=country) + map_data = rnaturalearth::ne_countries( scale=switch(map_resolution, "low"=110, "medium"=50, "high"=10, 50), + country=country, returnclass="sf" ) # Fix warning messages from projecting rnaturalearth object # Solution: Recreate SpatialPolygonsDataFrame from output - map_data = sp::SpatialPolygonsDataFrame( Sr=sp::SpatialPolygons(slot(map_data,"polygons"),proj4string=CRS_orig), data=slot(map_data,"data") ) + #map_data = sp::SpatialPolygonsDataFrame( Sr=sp::SpatialPolygons(slot(map_data,"polygons"),proj4string=CRS_orig), data=slot(map_data,"data") ) # comment(slot(map_data, "proj4string")) = comment(sp::CRS("+proj=longlat")) - map_proj = sp::spTransform(map_data, CRSobj=CRS_proj) + #map_proj = sp::spTransform(map_data, CRSobj=CRS_proj) + map_proj = sf::st_transform(map_data, crs=sf::st_crs(CRS_proj) ) + map_proj = sf::as_Spatial( map_proj ) ################### # Make panel figure @@ -146,19 +149,19 @@ function( Y_gt, # Define device Par = list( mfrow=mfrow, mar=mar, oma=oma, ...) 
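A minimal sketch of the sf-based map handling that replaces the sp::spTransform calls in plot_data and plot_variable above; the UTM projection string is an arbitrary example, not a value used by the package.

map_data = rnaturalearth::ne_countries( scale = 50, returnclass = "sf" )
crs_proj = sf::st_crs( "+proj=utm +zone=2 +datum=WGS84 +units=km" )   # hypothetical target projection
map_proj = sf::st_transform( map_data, crs = crs_proj )               # reproject the sf object
map_sp   = sf::as_Spatial( map_proj )                                 # back to a Spatial* object for the existing sp::plot(.) calls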
if(Format=="png"){ - png(file=paste0(working_dir,file_name,".png"), + png(file=file.path(working_dir,paste0(file_name,".png")), width=Par$mfrow[2]*MapSizeRatio[2], height=Par$mfrow[1]*MapSizeRatio[1], res=Res, units='in') on.exit( dev.off() ) } if(Format=="jpg"){ - jpeg(file=paste0(working_dir,file_name,".jpg"), + jpeg(file=file.path(working_dir,paste0(file_name,".jpg")), width=Par$mfrow[2]*MapSizeRatio[2], height=Par$mfrow[1]*MapSizeRatio[1], res=Res, units='in') on.exit( dev.off() ) } if(Format%in%c("tif","tiff")){ - tiff(file=paste0(working_dir,file_name,".tif"), + tiff(file=file.path(working_dir,paste0(file_name,".tif")), width=Par$mfrow[2]*MapSizeRatio[2], height=Par$mfrow[1]*MapSizeRatio[1], res=Res, units='in') on.exit( dev.off() ) @@ -229,18 +232,19 @@ function( Y_gt, # Include legend if( !any(is.na(c(legend_x,legend_y))) & (tI==ncol(Y_gt) | is.na(zlim[1])) ){ - xl = (1-legend_x[1])*par('usr')[1] + (legend_x[1])*par('usr')[2] - xr = (1-legend_x[2])*par('usr')[1] + (legend_x[2])*par('usr')[2] - yb = (1-legend_y[1])*par('usr')[3] + (legend_y[1])*par('usr')[4] - yt = (1-legend_y[2])*par('usr')[3] + (legend_y[2])*par('usr')[4] - if( diff(legend_y) > diff(legend_x) ){ - align = c("lt","rb")[2] - gradient = c("x","y")[2] - }else{ - align = c("lt","rb")[1] - gradient = c("x","y")[1] - } - plotrix::color.legend(xl=xl, yb=yb, xr=xr, yt=yt, legend=round(seq(Zlim[1],Zlim[2],length=4),legend_digits), rect.col=col(1000), cex=cex.legend, align=align, gradient=gradient) + plot_legend( Zlim, legend_x=legend_x, legend_y=legend_y, cex.legend=cex.legend, col=col, legend_digits=legend_digits ) + #xl = (1-legend_x[1])*par('usr')[1] + (legend_x[1])*par('usr')[2] + #xr = (1-legend_x[2])*par('usr')[1] + (legend_x[2])*par('usr')[2] + #yb = (1-legend_y[1])*par('usr')[3] + (legend_y[1])*par('usr')[4] + #yt = (1-legend_y[2])*par('usr')[3] + (legend_y[2])*par('usr')[4] + #if( diff(legend_y) > diff(legend_x) ){ + # align = c("lt","rb")[2] + # gradient = c("x","y")[2] + #}else{ + # align = c("lt","rb")[1] + # gradient = c("x","y")[1] + #} + #plotrix::color.legend(xl=xl, yb=yb, xr=xr, yt=yt, legend=round(seq(Zlim[1],Zlim[2],length=4),legend_digits), rect.col=col(1000), cex=cex.legend, align=align, gradient=gradient) } } diff --git a/R/rotate_factors.R b/R/rotate_factors.R index 7b47f43..d8c5bbf 100644 --- a/R/rotate_factors.R +++ b/R/rotate_factors.R @@ -132,10 +132,13 @@ function( L_pj = NULL, # Flip around for( j in 1:dim(L_pj_rot)[2] ){ + Sign = sign(sum(L_pj_rot[,j])) + # sign(0) = 0, so switch 0 to 1 in that case + Sign = ifelse(Sign==0, 1, Sign) if( !is.null(Psi_sjt) ){ - Psi_rot[,j,] = Psi_rot[,j,] * sign(sum(L_pj_rot[,j])) + Psi_rot[,j,] = Psi_rot[,j,] * Sign } - L_pj_rot[,j] = L_pj_rot[,j] * sign(sum(L_pj_rot[,j])) + L_pj_rot[,j] = L_pj_rot[,j] * Sign } # Check for errors diff --git a/inst/extdata/EBS_pollock/Kmeans-100.RData b/inst/extdata/EBS_pollock/Kmeans_knots-100.RData similarity index 100% rename from inst/extdata/EBS_pollock/Kmeans-100.RData rename to inst/extdata/EBS_pollock/Kmeans_knots-100.RData diff --git a/man/calculate_proportion.Rd b/man/calculate_proportion.Rd index f75c231..30f3ae6 100644 --- a/man/calculate_proportion.Rd +++ b/man/calculate_proportion.Rd @@ -5,6 +5,7 @@ \title{Calculate the compositional-expansion} \usage{ calculate_proportion( + fit, TmbData, Index, Expansion_cz = NULL, @@ -12,8 +13,9 @@ calculate_proportion( years_to_plot = NULL, strata_names = NULL, category_names = NULL, + sample_size_method = c("Taylor_series", "sample_based"), plot_legend = ifelse(TmbData$n_l > 1, 
TRUE, FALSE), - DirName = paste0(getwd(), "/"), + DirName = getwd(), PlotName = "Proportion.png", PlotName2 = "Average.png", interval_width = 1, @@ -21,6 +23,7 @@ calculate_proportion( height = 6, xlab = "Category", ylab = "Proportion", + n_samples = 250, ... ) } @@ -30,15 +33,17 @@ calculate_proportion( \item{Index}{output from \code{FishStatsUtils::plot_biomass_index}} \item{Expansion_cz}{matrix specifying how densities are expanded when calculating annual indices, with a row for each category \code{c} and two columns. -The first column specifies whether to calculate annual index for category \code{c} as the weighted-sum across density estimates, -where density is weighted by area ("area-weighted expansion", \code{Expansion[c,1]=0}, the default), -where density is weighted by the expanded value for another category ("abundance weighted expansion" \code{Expansion[c1,1]=1}), -the index is calculated as the weighted average of density weighted by the expanded value for another category -("abundance weighted-average expansion" \code{Expansion[c1,1]=2}), or the area-weighted abundance is added to the expanded -abundance for a prior category \code{Expansion[c1,1]=3}). -The 2nd column is used when \code{Expansion[c1,1]=1} or \code{Expansion[c1,1]=2} or \code{Expansion[c1,1]=3}, -and specifies the category to use for abundance-weighted expansion/average/summation, -where \code{Expansion[c1,2]=c2} and \code{c2} must be lower than \code{c1}.} + The first column specifies how to calculate the annual index for category \code{c} from the density estimates, as follows: +\describe{ + \item{\code{Expansion[c,1]=0}}{ density is weighted by area ("area-weighted expansion", the default) } + \item{\code{Expansion[c,1]=1}}{ density is weighted by the expanded value for another category ("abundance weighted expansion") } + \item{\code{Expansion[c,1]=2}}{ the index is calculated as the weighted average of density weighted by the expanded value for another category ("abundance weighted-average expansion") } + \item{\code{Expansion[c,1]=3}}{ area-weighted abundance is added to the expanded abundance for a prior category ("area-weighted cumulative total") } + \item{\code{Expansion[c,1]=4}}{ the fraction across categories is calculated, and then multiplied by another area-weighted index ("abundance weighted proportional expansion") } +} + The 2nd column is used when \code{Expansion[c1,1]=1} or \code{Expansion[c1,1]=2} or \code{Expansion[c1,1]=3} or \code{Expansion[c1,1]=4}, + and specifies the category to use for abundance-weighted expansion/average/summation/proportions, + where \code{Expansion[c1,2]=c2} and \code{c2} must be lower than \code{c1}.} \item{year_labels}{character vector specifying names for labeling times \code{t_i}} @@ -48,6 +53,13 @@ where \code{Expansion[c1,2]=c2} and \code{c2} must be lower than \code{c1}.} \item{category_names}{names for categories (if using package \code{`VAST`})} +\item{sample_size_method}{Method used to calculate the variance in proportions, which is then converted to an approximately equivalent multinomial sample size that can be used as input sample size in a subsequent stock assessment model.
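As a rough numerical illustration of the variance-to-sample-size conversion just described (a toy sketch using the standard multinomial relation Var(p) = p*(1-p)/n, not the package's internal code; the index values and sampling distribution below are invented):

# Toy sketch: approximate an equivalent multinomial sample size from the
# variance of estimated proportions.
set.seed(1)
# Stand-ins for sampled category-specific indices; in practice these would be
# drawn from the joint precision of fixed and random effects.
Index_rc = cbind( "age1" = rlnorm(250, meanlog = log(30), sdlog = 0.1),
                  "age2" = rlnorm(250, meanlog = log(70), sdlog = 0.1) )
Prop_rc = Index_rc / rowSums(Index_rc)   # proportion for each sample (row) and category
p_hat = colMeans(Prop_rc)                # estimated proportions
var_p = apply(Prop_rc, 2, var)           # sample-based variance of each proportion
n_eff = p_hat * (1 - p_hat) / var_p      # solve Var(p) = p*(1-p)/n for n
n_eff                                    # approximately equivalent multinomial sample size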
Options are: +\describe{ + \item{\code{sample_size_method="Taylor_series"}}{a Taylor-series approximation to the ratio X/(X+Y), where X is the category-specific index and Y is the index for all other categories} + \item{\code{sample_size_method="sample_based"}}{taking samples from the joint precision of fixed and random effects, calculating proportions for each sample, and then computing the variance across those samples} +} +The sample-based approximation is expected to have higher variance and therefore lower approximate sample size. However, it may also have poor performance in cases where variance estimates are imprecise (such that the multivariate-normal approximation to the joint precision is poor), and has not been thoroughly ground-tested in real-world cases.} + \item{plot_legend}{Add legend for labelling colors} \item{DirName}{Directory for saving plot and table} diff --git a/man/fit_model.Rd b/man/fit_model.Rd index 66c3b06..32a7a65 100644 --- a/man/fit_model.Rd +++ b/man/fit_model.Rd @@ -13,7 +13,7 @@ fit_model( a_i, c_iz = rep(0, length(b_i)), v_i = rep(0, length(b_i)), - working_dir = paste0(getwd(), "/"), + working_dir = tempdir(), X1config_cp = NULL, X2config_cp = NULL, covariate_data, @@ -69,7 +69,9 @@ to account for unlabeled multispecies data.} \item{v_i}{Vector of integers ranging from 0 to the number of vessels minus 1, providing sampling category (e.g., vessel or tow) associated with overdispersed variation for each observation i -(by default \code{v_i=0} for all samples, which will not affect things given the default values for \code{OverdispersionConfig})} +(by default \code{v_i=0} for all samples, which will not affect things given the default values for \code{OverdispersionConfig}). +In some cases a portion of observations have an overdispersion effect while others do not; in that case the observations without one +are specified as \code{v_i=NA}} \item{X1config_cp}{matrix of settings for each density covariate for the 1st lienar predictor, where the row corresponds to model category, and column corresponds to each density covariate @@ -154,7 +156,7 @@ when fixed effects are at upper or lower bounds.} \item{...}{additional arguments to pass to \code{\link{make_extrapolation_info}}, \code{\link{make_spatial_info}}, \code{\link[VAST]{make_data}}, \code{\link[VAST]{make_model}}, or \code{\link[TMBhelper]{fit_tmb}}, where arguments are matched by name against each function. If an argument doesn't match, it is still passed to \code{\link[VAST]{make_data}}. Note that \code{\link{make_spatial_info}} -passes named arguments to \code{\link[INLA]{inla.mesh.create}}.} +passes named arguments to \code{\link[fmesher]{fm_mesh_2d}}.} } \value{ Object of class \code{fit_model}, containing formatted inputs and outputs from VAST diff --git a/man/make_extrapolation_info.Rd b/man/make_extrapolation_info.Rd index 63f089e..ef7781d 100644 --- a/man/make_extrapolation_info.Rd +++ b/man/make_extrapolation_info.Rd @@ -27,7 +27,7 @@ make_extrapolation_info( nstart = 100, area_tolerance = 0.05, backwards_compatible_kmeans = FALSE, - DirPath = paste0(getwd(), "/"), + DirPath = getwd(), ...
) } diff --git a/man/make_kmeans.Rd b/man/make_kmeans.Rd index 4a88508..d17cc82 100644 --- a/man/make_kmeans.Rd +++ b/man/make_kmeans.Rd @@ -10,7 +10,7 @@ make_kmeans( nstart = 100, randomseed = 1, iter.max = 1000, - DirPath = paste0(getwd(), "/"), + DirPath = getwd(), Save_Results = TRUE, kmeans_purpose = "spatial", backwards_compatible_kmeans = FALSE diff --git a/man/make_mesh.Rd b/man/make_mesh.Rd index 60873cd..32aa145 100644 --- a/man/make_mesh.Rd +++ b/man/make_mesh.Rd @@ -13,6 +13,7 @@ make_mesh( anisotropic_mesh = NULL, fine_scale = FALSE, map_data, + mesh_package = c("INLA", "fmesher"), ... ) } diff --git a/man/make_settings.Rd b/man/make_settings.Rd index 17afc01..9b61745 100644 --- a/man/make_settings.Rd +++ b/man/make_settings.Rd @@ -24,7 +24,8 @@ make_settings( n_categories, VamConfig, max_cells, - knot_method + knot_method, + mesh_package ) } \arguments{ @@ -109,18 +110,6 @@ where \code{0} is off, code{"AR1"} is an AR1 process, and aninteger greater than \item{bias.correct}{Boolean indicating whether to do epsilon bias-correction; see \code{\link[TMB]{sdreport}} and \code{\link[TMBhelper]{fit_tmb}}for details} -\item{Options}{a tagged-vector that is empty by default, \code{Options=c()}, but where the following slots might be helpful to add, - either by passing \code{Options} to \code{\link[FishStatsUtils]{make_settings}}, or editing after a call to that function: -\describe{ - \item{\code{Options["Calculate_Range"]=TRUE}}{Turns on internal calculation and SE for center-of-gravity} - \item{\code{Options["Calculate_effective_area"]=TRUE}}{Turns on internal calculation and SE for effective area occupied measuring range expansion/contraction} - \item{\code{Options["Calculate_Cov_SE"]=TRUE}}{Turns on internal calculation and SE for covariance among categories (i.e. in factor model)} - \item{\code{Options["Calculate_proportion"]=TRUE}}{Turns on internal calculation and SE for proportion of response within each category (e.g., for calculating proportion-at-age or species turnover)} - \item{\code{Options["Calculate_Synchrony"]=TRUE}}{Turns on internal calculation and SE for Loreau metric of synchrony (a.k.a. portfolio effects)} - \item{\code{Options["report_additional_variables"]=TRUE}}{Export additional variables to \code{Report} object, to use for diagnostics or additional exploration} - \item{\code{Options["basin_method"]}}{Controls how the density-dependent index is generated from model variables. Default \code{Options["basin_method"]=2}) uses annual mean of betas and epsilons as index. 
Alternative \code{Options["basin_method"]=4}) uses a Lagrange multiplier to penalize index towards total abundance} - \item{\code{Options["range_fraction"]}}{The decorrelation range when passing over land relative to over water; the default value \code{Options["range_fraction"]=0.2} indicates that the range is shorter over land, i.e., that correlations are strongest via water, while changing to \code{Options["range_fraction"]=5} would represent correlations transfer via land more than water}#' }} - \item{use_anisotropy}{Boolean indicating whether to estimate two additional parameters representing geometric anisotropy} \item{vars_to_correct}{a character-vector listing which parameters to include for bias-correction, as passed to \code{\link[TMBhelper]{fit_tmb}}} diff --git a/man/make_spatial_info.Rd b/man/make_spatial_info.Rd index 835c3e2..35960c9 100644 --- a/man/make_spatial_info.Rd +++ b/man/make_spatial_info.Rd @@ -20,11 +20,12 @@ make_spatial_info( iter.max = 1000, randomseed = 1, nstart = 100, - DirPath = paste0(getwd(), "/"), + DirPath = getwd(), Save_Results = FALSE, LON_intensity, LAT_intensity, backwards_compatible_kmeans = FALSE, + mesh_package = "INLA", ... ) } diff --git a/man/plot_biomass_index.Rd b/man/plot_biomass_index.Rd index 82e3048..fa26f03 100644 --- a/man/plot_biomass_index.Rd +++ b/man/plot_biomass_index.Rd @@ -6,7 +6,7 @@ \usage{ plot_biomass_index( fit, - DirName = paste0(getwd(), "/"), + DirName = getwd(), PlotName = "Index", interval_width = 1, years_to_plot = NULL, diff --git a/man/plot_clusters.Rd b/man/plot_clusters.Rd index e3c4ed6..4c17942 100644 --- a/man/plot_clusters.Rd +++ b/man/plot_clusters.Rd @@ -13,7 +13,7 @@ plot_clusters( year_labels = fit$year_labels, category_names = fit$category_names, map_list = NULL, - working_dir = paste0(getwd(), "/"), + working_dir = getwd(), file_name = paste0("Class-", var_name), file_name2 = paste0("Class-", var_name, "-averages"), replace_Inf_with_NA = TRUE, diff --git a/man/plot_data.Rd b/man/plot_data.Rd index 9fe8207..67a5ded 100644 --- a/man/plot_data.Rd +++ b/man/plot_data.Rd @@ -11,7 +11,7 @@ plot_data( Lat_i = Data_Geostat[, "Lat"], Lon_i = Data_Geostat[, "Lon"], Year_i = Data_Geostat[, "Year"], - PlotDir = paste0(getwd(), "/"), + PlotDir = getwd(), Plot1_name = "Data_and_knots.png", Plot2_name = "Data_by_year.png", col = "red", diff --git a/man/plot_factors.Rd b/man/plot_factors.Rd index 8dd74e5..3d93851 100644 --- a/man/plot_factors.Rd +++ b/man/plot_factors.Rd @@ -18,7 +18,7 @@ plot_factors( Dim_year = NULL, Dim_species = NULL, projargs = "+proj=longlat", - plotdir = paste0(getwd(), "/"), + plotdir = getwd(), land_color = "grey", zlim = NA, testcutoff = 1e-04, diff --git a/man/plot_index.Rd b/man/plot_index.Rd index 0436db5..feb5e1a 100644 --- a/man/plot_index.Rd +++ b/man/plot_index.Rd @@ -13,7 +13,7 @@ plot_index( category_names = NULL, scale = "uniform", plot_legend = NULL, - DirName = paste0(getwd(), "/"), + DirName = getwd(), PlotName = "Index.png", interval_width = 1, width = NULL, diff --git a/man/plot_legend.Rd b/man/plot_legend.Rd new file mode 100644 index 0000000..6ff07a1 --- /dev/null +++ b/man/plot_legend.Rd @@ -0,0 +1,19 @@ +% Generated by roxygen2: do not edit by hand +% Please edit documentation in R/plot_legend.R +\name{plot_legend} +\alias{plot_legend} +\title{Plot standard maps} +\usage{ +plot_legend( + Zlim, + legend_x = c(0, 0.05), + legend_y = c(0.05, 0.45), + cex.legend = 1, + col = viridisLite::viridis, + legend_digits = 1, + f = identity +) +} +\description{ +Plot standard maps +} 
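Since plot_legend() is newly exported, a minimal usage sketch may help. It assumes an installed FishStatsUtils containing this function, follows the argument names shown in the usage block above, and uses a simulated surface with arbitrary legend coordinates:

# Minimal sketch: add a color legend to an existing base-graphics plot.
library(viridisLite)
library(FishStatsUtils)

Y_gt = matrix( rnorm(100), nrow = 10 )   # stand-in for a mapped variable
Zlim = range(Y_gt)
image( Y_gt, col = viridis(1000), zlim = Zlim )
plot_legend( Zlim,
             legend_x = c(0.95, 1.00),   # arbitrary placement along the right edge
             legend_y = c(0.05, 0.45),
             cex.legend = 1,
             col = viridis,              # a palette function, matching the documented default
             legend_digits = 1 )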
diff --git a/man/plot_maps.Rd b/man/plot_maps.Rd index adff858..24e64f2 100644 --- a/man/plot_maps.Rd +++ b/man/plot_maps.Rd @@ -14,7 +14,7 @@ plot_maps( years_to_plot = NULL, category_names = NULL, quiet = FALSE, - working_dir = paste0(getwd(), "/"), + working_dir = getwd(), MapSizeRatio = NULL, n_cells, plot_value = "estimate", diff --git a/man/plot_range_edge.Rd b/man/plot_range_edge.Rd index 7533fcc..7e3bbd5 100644 --- a/man/plot_range_edge.Rd +++ b/man/plot_range_edge.Rd @@ -11,7 +11,7 @@ plot_range_edge( years_to_plot = NULL, strata_names = NULL, category_names = NULL, - working_dir = paste0(getwd(), "/"), + working_dir = getwd(), quantiles = c(0.05, 0.95), n_samples = 100, interval_width = 1, diff --git a/man/plot_range_index.Rd b/man/plot_range_index.Rd index 1fc121a..4535e6a 100644 --- a/man/plot_range_index.Rd +++ b/man/plot_range_index.Rd @@ -11,10 +11,9 @@ plot_range_index( year_labels = NULL, years_to_plot = NULL, strata_names = NULL, - PlotDir = paste0(getwd(), "/"), - FileName_COG = paste0(PlotDir, "/center_of_gravity.png"), - FileName_Area = paste0(PlotDir, "/Area.png"), - FileName_EffArea = paste0(PlotDir, "/Effective_Area.png"), + PlotDir = getwd(), + FileName_COG = "center_of_gravity.png", + FileName_EffArea = "Effective_Area.png", Znames = rep("", ncol(TmbData$Z_gm)), use_biascorr = TRUE, category_names = NULL, @@ -39,8 +38,6 @@ plot_range_index( \item{FileName_COG}{Full filename (including directory) for center-of-gravity plot} -\item{FileName_Area}{Full filename (including directory) for center-of-gravity plot} - \item{FileName_EffArea}{Full filename (including directory) for center-of-gravity plot} \item{Znames}{Names for center-of-gravity summary statistics} @@ -52,6 +49,8 @@ plot_range_index( \item{interval_width}{width for confidence intervals} \item{...}{Other inputs to `par()`} + +\item{FileName_Area}{Full filename (including directory) for center-of-gravity plot} } \value{ Return Tagged list of output diff --git a/man/plot_residual_semivariance.Rd b/man/plot_residual_semivariance.Rd index 78114fd..bd02933 100644 --- a/man/plot_residual_semivariance.Rd +++ b/man/plot_residual_semivariance.Rd @@ -9,7 +9,7 @@ plot_residual_semivariance( dharma_raster, dharmaRes, file_name = "quantile_residuals_semivariance", - working_dir = paste0(getwd(), "/") + working_dir = getwd() ) } \description{ diff --git a/man/plot_results.Rd b/man/plot_results.Rd index 163e4bc..fa6fb83 100644 --- a/man/plot_results.Rd +++ b/man/plot_results.Rd @@ -8,7 +8,7 @@ plot_results( fit, settings = fit$settings, plot_set = 3, - working_dir = paste0(getwd(), "/"), + working_dir = getwd(), year_labels = fit$year_labels, years_to_plot = fit$years_to_plot, category_names = fit$category_names, diff --git a/man/plot_similarity.Rd b/man/plot_similarity.Rd index 0b21c32..a2469d5 100644 --- a/man/plot_similarity.Rd +++ b/man/plot_similarity.Rd @@ -9,7 +9,7 @@ plot_similarity( year_labels = fit$year_labels, category_names = fit$category_names, similarity_metric = c("hclust", "Correlation", "Dissimilarity", "Covariance")[1], - working_dir = paste0(getwd(), "/"), + working_dir = getwd(), file_name = similarity_metric, panel_size = 3, Res = 200, diff --git a/man/plot_variable.Rd b/man/plot_variable.Rd index d105a4d..ec535cc 100644 --- a/man/plot_variable.Rd +++ b/man/plot_variable.Rd @@ -11,7 +11,7 @@ plot_variable( projargs = "+proj=longlat", map_resolution = "medium", file_name = "density", - working_dir = paste0(getwd(), "/"), + working_dir = getwd(), Format = "png", Res = 200, add = FALSE, @@ -78,7 +78,7 
@@ will often need to be modified for a given purpose.} \item{fun}{function or character. To determine what values to assign to cells that are covered by multiple spatial features. You can use functions such as \code{min, max}, or \code{mean}, or one of the following character values: \code{'first'}, \code{'last'}, \code{'count'}. The default value is \code{'last'}. In the case of SpatialLines*, \code{'length'} is also allowed (currently for planar coordinate systems only). -If \code{x} represents points, \code{fun} must accept a \code{na.rm} argument, either explicitly or through 'dots'. This means that \code{fun=length} fails, but \code{fun=function(x,...)length(x)} works, although it ignores the \code{na.rm} argument. To use the \code{na.rm} argument you can use a function like this: fun=function(x, na.rm){if (na.rm) length(na.omit(x)) else (length(x)}, or use a function that removes \code{NA} values in all cases, like this function to compute the number of unique values per grid cell "richness": \code{fun=function(x, ...) {length(unique(na.omit(x)))} }. If you want to count the number of points in each grid cell, you can use \code{ fun='count'} or \code{fun=function(x,...){length(x)}}. +If \code{x} represents points, \code{fun} must accept a \code{na.rm} argument, either explicitly or through the ellipses ('dots'). This means that \code{fun=length} fails, but \code{fun=function(x,...)length(x)} works, although it ignores the \code{na.rm} argument. To use the \code{na.rm} argument you can use a function like this: \code{fun=function(x, na.rm){if (na.rm) length(na.omit(x)) else (length(x))}}, or use a function that removes \code{NA} values in all cases, like this function to compute the number of unique values per grid cell "richness": \code{fun=function(x, ...) {length(unique(na.omit(x)))} }. If you want to count the number of points in each grid cell, you can use \code{ fun='count'} or \code{fun=function(x,...){length(x)}}. You can also pass multiple functions using a statement like \code{fun=function(x, ...) c(length(x),mean(x))}, in which case the returned object is a RasterBrick (multiple layers). } diff --git a/man/predict.fit_model.Rd b/man/predict.fit_model.Rd index 2888602..ce34a5e 100644 --- a/man/predict.fit_model.Rd +++ b/man/predict.fit_model.Rd @@ -16,7 +16,7 @@ new_covariate_data = NULL, new_catchability_data = NULL, do_checks = TRUE, - working_dir = paste0(getwd(), "/") + working_dir = getwd() ) } \arguments{ @@ -43,7 +43,9 @@ to account for unlabeled multispecies data.} \item{v_i}{Vector of integers ranging from 0 to the number of vessels minus 1, providing sampling category (e.g., vessel or tow) associated with overdispersed variation for each observation i -(by default \code{v_i=0} for all samples, which will not affect things given the default values for \code{OverdispersionConfig})} +(by default \code{v_i=0} for all samples, which will not affect things given the default values for \code{OverdispersionConfig}). +In some cases a portion of observations have an overdispersion effect while others do not; in that case the observations without one +are specified as \code{v_i=NA}} \item{keep_old_covariates}{Whether to add new_covariate_data to existing data.
This is useful when predicting values at new locations, but does not work diff --git a/man/summary.fit_model.Rd b/man/summary.fit_model.Rd index e322520..ce3e018 100644 --- a/man/summary.fit_model.Rd +++ b/man/summary.fit_model.Rd @@ -20,7 +20,7 @@ \arguments{ \item{x}{Output from \code{\link{fit_model}}} -\item{what}{String indicating what to summarize; options are `density` or `residuals`} +\item{what}{String indicating what to summarize; options are `density`, `index`, or `residuals`} \item{n_samples}{Number of samples used when \code{what="residuals"}}
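For reference, a minimal usage sketch of the expanded summary() method; it assumes `fit` is an existing object returned by fit_model(), uses only arguments shown in the documentation and calls above, and the comments describe intended outputs rather than verified structures:

# Minimal sketch, assuming `fit` was returned by fit_model():
dens_df  = summary( fit, what = "density" )
index_df = summary( fit, what = "index" )        # data frame of expanded (index) values
resid_qq = summary( fit, what = "residuals",
                    n_samples = 250,             # samples used for DHARMa-style quantile residuals
                    working_dir = tempdir() )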