From ea273bbff9e00701d19da0c3a5f42cd26b662b7c Mon Sep 17 00:00:00 2001
From: Aki Vehtari
Date: Fri, 12 Apr 2024 21:51:05 +0300
Subject: [PATCH] update psis ref + some minor typo fixes

---
 R/loo_moment_matching.R                  |  4 ++--
 R/loo_subsample.R                        | 16 +++++++---------
 man-roxygen/loo-and-compare-references.R |  4 ++--
 man-roxygen/loo-and-psis-references.R    |  6 +++---
 vignettes/loo2-example.Rmd               |  6 ++----
 vignettes/loo2-large-data.Rmd            |  6 ++----
 vignettes/loo2-lfo.Rmd                   |  3 +--
 vignettes/loo2-mixis.Rmd                 |  3 +--
 vignettes/loo2-moment-matching.Rmd       |  6 ++----
 vignettes/loo2-non-factorized.Rmd        |  3 +--
 vignettes/loo2-weights.Rmd               |  3 +--
 vignettes/loo2-with-rstan.Rmd            |  6 ++----
 12 files changed, 26 insertions(+), 40 deletions(-)

diff --git a/R/loo_moment_matching.R b/R/loo_moment_matching.R
index b8832e7e..3721a885 100644
--- a/R/loo_moment_matching.R
+++ b/R/loo_moment_matching.R
@@ -195,7 +195,7 @@ loo_moment_match.default <- function(x, loo, post_draws, log_lik_i,
 #' @param i observation number.
 #' @param x A fitted model object.
 #' @param log_lik_i A function that takes `x` and `i` and returns a matrix (one
-#'   column per chain) or a vector (all chains stacked) of log-likeliood draws
+#'   column per chain) or a vector (all chains stacked) of log-likelihood draws
 #'   of the `i`th observation based on the model `x`. If the draws are obtained
 #'   using MCMC, the matrix with MCMC chains separated is preferred.
 #' @param unconstrain_pars A function that takes arguments `x`, and `pars` and
@@ -440,7 +440,7 @@ loo_moment_match_i <- function(i,
 #'   `upars` and returns a matrix of log-posterior density values of the
 #'   unconstrained posterior draws passed via `upars`.
 #' @param log_lik_i_upars A function that takes arguments `x`, `upars`,
-#'   and `i` and returns a vector of log-likeliood draws of the `i`th
+#'   and `i` and returns a vector of log-likelihood draws of the `i`th
 #'   observation based on the unconstrained posterior draws passed via
 #'   `upars`.
 #' @param r_eff_i MCMC effective sample size divided by the total sample size
diff --git a/R/loo_subsample.R b/R/loo_subsample.R
index fd59b065..d79bafa1 100644
--- a/R/loo_subsample.R
+++ b/R/loo_subsample.R
@@ -316,7 +316,7 @@ update.psis_loo_ss <- function(object, ...,
     stopifnot(is.data.frame(data) || is.matrix(data) & !is.null(draws))
   }
 
-  # Compute subsample indecies
+  # Compute subsample indices
   if (length(observations) > 1) {
     idxs <- compute_idxs(observations)
   } else {
@@ -448,7 +448,7 @@ nobs.psis_loo_ss <- function(object, ...) {
 #' @details
 #' The choice `psis` is returned if a `psis_loo` object
 #' is converted to a `psis_loo_ss` object with `as.psis_loo_ss()`.
-#' But `psis` cannot be chosen in the api of `loo_subsample()`.
+#' But `psis` cannot be chosen in the API of `loo_subsample()`.
 #'
 #' @noRd
 #' @param api The choices available in the loo API or all possible choices.
@@ -756,7 +756,7 @@ compute_idxs <- function(observations) {
 }
 
 
-#' Compare the indecies to prepare handling
+#' Compare the indices to prepare handling
 #'
 #' @details
 #' The function compares the object and sampled indices into `new`
@@ -1193,14 +1193,15 @@ srs_diff_est <- function(y_approx, y, y_idx) {
   # eq (9) first row second `+` should be `-`
   # Supplementary material eq (6) has this correct
   # Here the variance is for sum, while in the paper the variance is for mean
-  # which explains the proporional difference of 1/n
+  # which explains the proportional difference of 1/N
   est_list$hat_v_y <- (t_pi2_tilde + t_hat_epsilon) - # a (has been checked)
     (1/N) * (t_e^2 - est_list$v_y_hat + 2 * t_pi_tilde * est_list$y_hat - t_pi_tilde^2) # b
   est_list
 }
 
-#' Estimate elpd using the standard SRS estimator and SRS WOR
+#' Estimate elpd using the standard simple random sampling without
+#' replacement (SRS-WOR) estimator
 #' @noRd
 #' @param x A `psis_loo_ss` object.
 #' @return A `psis_loo_ss` object.
@@ -1220,7 +1221,7 @@ loo_subsample_estimation_srs <- function(x) {
   update_psis_loo_ss_estimates(x)
 }
 
-#' Simple SRS-WOR estimation
+#' Simple random sampling without replacement (SRS-WOR) estimation
 #' @noRd
 #' @param y The values observed.
 #' @param y_approx A vector of length N.
@@ -1324,6 +1325,3 @@ assert_subsampling_pointwise <- function(x) {
   checkmate::assert_names(colnames(x), identical.to = c("elpd_loo", "mcse_elpd_loo", "p_loo", "looic", "influence_pareto_k", "idx", "m_i", "elpd_loo_approx"))
   x
 }
-
-
-
diff --git a/man-roxygen/loo-and-compare-references.R b/man-roxygen/loo-and-compare-references.R
index 13571284..9c8bd866 100644
--- a/man-roxygen/loo-and-compare-references.R
+++ b/man-roxygen/loo-and-compare-references.R
@@ -7,8 +7,8 @@
 #'
 #' Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 #' Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-#' accepted for publication.
-#' [preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+#' 25(72):1-58.
+#' [PDF](https://jmlr.org/papers/v25/19-556.html)
 #'
 #' Sivula, T, Magnusson, M., Matamoros A. A., and Vehtari, A. (2022).
 #' Uncertainty in Bayesian leave-one-out cross-validation based model
diff --git a/man-roxygen/loo-and-psis-references.R b/man-roxygen/loo-and-psis-references.R
index cb64416a..a11231fb 100644
--- a/man-roxygen/loo-and-psis-references.R
+++ b/man-roxygen/loo-and-psis-references.R
@@ -6,7 +6,7 @@
 #'   [preprint arXiv:1507.04544](https://arxiv.org/abs/1507.04544)).
 #'
 #' Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
-#' Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-#' accepted for publication.
-#' [preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+#' Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
+#' 25(72):1-58.
+#' [PDF](https://jmlr.org/papers/v25/19-556.html)
 #'
diff --git a/vignettes/loo2-example.Rmd b/vignettes/loo2-example.Rmd
index 046e6ebb..a4aa3a50 100644
--- a/vignettes/loo2-example.Rmd
+++ b/vignettes/loo2-example.Rmd
@@ -32,8 +32,7 @@ encourage readers to refer to the following papers for more details:
 
 * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 # Setup
 
@@ -302,5 +301,4 @@ Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4.
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
diff --git a/vignettes/loo2-large-data.Rmd b/vignettes/loo2-large-data.Rmd
index a7af3340..0dadaf7e 100644
--- a/vignettes/loo2-large-data.Rmd
+++ b/vignettes/loo2-large-data.Rmd
@@ -37,8 +37,7 @@ Proceedings of the 23rd International Conference on Artificial Intelligence and
 
 * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 which provide important background for understanding the methods implemented
 in the package.
@@ -613,5 +612,4 @@ Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4.
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
diff --git a/vignettes/loo2-lfo.Rmd b/vignettes/loo2-lfo.Rmd
index 265a8387..5582f222 100644
--- a/vignettes/loo2-lfo.Rmd
+++ b/vignettes/loo2-lfo.Rmd
@@ -642,8 +642,7 @@ Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation us
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 
 
diff --git a/vignettes/loo2-mixis.Rmd b/vignettes/loo2-mixis.Rmd
index f51619c2..8d24e76f 100644
--- a/vignettes/loo2-mixis.Rmd
+++ b/vignettes/loo2-mixis.Rmd
@@ -197,8 +197,7 @@ Vehtari A., Gelman A., and Gabry J. (2017). Practical Bayesian model evaluation
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 
 
diff --git a/vignettes/loo2-moment-matching.Rmd b/vignettes/loo2-moment-matching.Rmd
index 9f67ce39..5f6687a1 100644
--- a/vignettes/loo2-moment-matching.Rmd
+++ b/vignettes/loo2-moment-matching.Rmd
@@ -45,8 +45,7 @@ papers
 
 * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 # Example: Eradication of Roaches
 
@@ -322,5 +321,4 @@ Vehtari, A., Gelman, A., and Gabry, J. (2017). Practical Bayesian model evaluati
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
diff --git a/vignettes/loo2-non-factorized.Rmd b/vignettes/loo2-non-factorized.Rmd
index a7a3a741..bbcf6c81 100644
--- a/vignettes/loo2-non-factorized.Rmd
+++ b/vignettes/loo2-non-factorized.Rmd
@@ -718,5 +718,4 @@ Vehtari A., Gelman A., & Gabry J. (2017). Practical Bayesian model evaluation us
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
diff --git a/vignettes/loo2-weights.Rmd b/vignettes/loo2-weights.Rmd
index 20629235..36280aa6 100644
--- a/vignettes/loo2-weights.Rmd
+++ b/vignettes/loo2-weights.Rmd
@@ -368,8 +368,7 @@ Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4.
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 Yao, Y., Vehtari, A., Simpson, D., and Gelman, A. (2018). Using
 stacking to average Bayesian predictive distributions. In Bayesian
diff --git a/vignettes/loo2-with-rstan.Rmd b/vignettes/loo2-with-rstan.Rmd
index fc526c3d..23ddf364 100644
--- a/vignettes/loo2-with-rstan.Rmd
+++ b/vignettes/loo2-with-rstan.Rmd
@@ -31,8 +31,7 @@ Some sections from this vignette are excerpted from our papers
 
 * Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
 
 which provide important background for understanding the methods implemented
 in the package.
@@ -239,5 +238,4 @@ Computing_. 27(5), 1413--1432. \doi:10.1007/s11222-016-9696-4.
 
 Vehtari, A., Simpson, D., Gelman, A., Yao, Y., and Gabry, J. (2024).
 Pareto smoothed importance sampling. *Journal of Machine Learning Research*,
-accepted for publication.
-[arXiv preprint arXiv:1507.02646](https://arxiv.org/abs/1507.02646)
+25(72):1-58. [PDF](https://jmlr.org/papers/v25/19-556.html)
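
Note (illustrative, not part of the patch): the loo_moment_matching.R hunk above documents the `log_lik_i` argument as a function taking `x` and `i` and returning a matrix of log-likelihood draws with one column per chain. A minimal sketch of such a function is given below; it assumes a hypothetical stanfit object whose generated quantities include a `log_lik` matrix, and is only one possible way to satisfy the documented interface.

# Sketch of a user-supplied `log_lik_i` for loo_moment_match(),
# assuming `x` is a stanfit with a `log_lik` generated quantity.
log_lik_i <- function(x, i, ...) {
  # merge_chains = FALSE returns an iterations x chains x observations array,
  # so slicing on `i` gives a matrix with one column per chain,
  # the format preferred for MCMC draws in the documentation above.
  ll <- loo::extract_log_lik(x, parameter_name = "log_lik", merge_chains = FALSE)
  ll[, , i]
}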