diff --git a/Project.toml b/Project.toml index 782217c..fd2dcec 100644 --- a/Project.toml +++ b/Project.toml @@ -4,6 +4,7 @@ authors = ["xKDR Forum, Sourish Das"] version = "0.1.1" [deps] +AdvancedVI = "b5ca4192-6429-45e5-a2d9-87aec30a685c" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" @@ -19,6 +20,7 @@ StatsModels = "3eaba693-59b7-5ba5-a881-562e759f1c8d" Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0" [compat] +AdvancedVI = "0.2.11" DataFrames = "1" Distributions = "0.25" Documenter = "0.27, 1" diff --git a/docs/src/api/bayesian_regression.md b/docs/src/api/bayesian_regression.md index 018096c..c9e9ad2 100644 --- a/docs/src/api/bayesian_regression.md +++ b/docs/src/api/bayesian_regression.md @@ -4,34 +4,42 @@ BayesianRegression ``` +## Bayesian Algorithms + +```@docs +BayesianAlgorithm +MCMC +VI +``` + ## Linear Regression ### Linear Regression with User Specific Gaussian Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, beta_prior_mean::Vector{Float64}, sim_size::Int64 = 1000) -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, beta_prior_mean::Vector{Float64}, algorithm::BayesianAlgorithm = MCMC()) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, algorithm::BayesianAlgorithm = MCMC()) ``` ### Linear Regression with Ridge Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, 
prior::Prior_Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 0.01) ``` ### Linear Regression with Laplace Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 0.01) ``` ### Linear Regression with Cauchy Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, algorithm::BayesianAlgorithm = MCMC()) ``` ### Linear Regression with T-distributed Prior ```@docs -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 2.0) ``` ### Linear Regression with Horse Shoe Prior ```@docs -fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,algorithm::BayesianAlgorithm = MCMC()) ``` ## Logistic Regression diff --git a/src/CRRao.jl b/src/CRRao.jl index 59c062d..4f54037 100644 --- a/src/CRRao.jl +++ b/src/CRRao.jl @@ -2,7 +2,7 @@ module CRRao using DataFrames, GLM, Turing, StatsModels, StatsBase using StatsBase, Distributions, LinearAlgebra -using Optim, NLSolversBase, Random, HypothesisTests +using Optim, NLSolversBase, Random, HypothesisTests, AdvancedVI import StatsBase: coef, coeftable, r2, adjr2, loglikelihood, aic, bic, predict, residuals, 
cooksdistance, fit import HypothesisTests: pvalue @@ -392,9 +392,47 @@ end Cauchit() = Cauchit(Cauchit_Link) +""" +```julia +BayesianAlgorithm +``` + +Abstract type representing bayesian algorithms which are used to dispatch to appropriate calls. +""" +abstract type BayesianAlgorithm end + +""" +```julia +MCMC <: BayesianAlgorithm +``` + +A type representing MCMC algorithms. +""" +struct MCMC <: BayesianAlgorithm + sim_size::Int64 + prediction_chain_start::Int64 +end + +MCMC() = MCMC(1000, 200) + +""" +```julia +VI <: BayesianAlgorithm +``` + +A type representing variational inference algorithms. +""" +struct VI <: BayesianAlgorithm + distribution_sample_count::Int64 + vi_max_iters::Int64 + vi_samples_per_step::Int64 +end + +VI() = VI(1000, 10000, 100) + export LinearRegression, LogisticRegression, PoissonRegression, NegBinomRegression, Boot_Residual export Prior_Ridge, Prior_Laplace, Prior_Cauchy, Prior_TDist, Prior_HorseShoe, Prior_Gauss -export CRRaoLink, Logit, Probit, Cloglog, Cauchit, fit +export CRRaoLink, Logit, Probit, Cloglog, Cauchit, fit, BayesianAlgorithm, MCMC, VI export coef, coeftable, r2, adjr2, loglikelihood, aic, bic, sigma, predict, residuals, cooksdistance, BPTest, pvalue export FrequentistRegression, BayesianRegression diff --git a/src/bayesian/getter.jl b/src/bayesian/getter.jl index 5e748d3..86a5b50 100644 --- a/src/bayesian/getter.jl +++ b/src/bayesian/getter.jl @@ -1,51 +1,27 @@ -function predict(container::BayesianRegression{:LinearRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) +function predict(container::BayesianRegression{:LinearRegression}, newdata::DataFrame) X = modelmatrix(container.formula, newdata) - - params = get_params(container.chain[prediction_chain_start:end,:,:]) - W = params.β - if isa(W, Tuple) - W = reduce(hcat, W) - end - #predictions = params.α' .+ X * W' - predictions = X * W' + W = container.samples + predictions = X * W return vec(mean(predictions, dims=2)) end function 
predict(container::BayesianRegression{:LogisticRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) - - params = get_params(container.chain[prediction_chain_start:end,:,:]) - W = params.β - if isa(W, Tuple) - W = reduce(hcat, W) - end - #z = params.α' .+ X * W' - z = X * W' + W = container.samples[:, prediction_chain_start:end] + z = X * W return vec(mean(container.link.link_function.(z), dims=2)) end function predict(container::BayesianRegression{:NegativeBinomialRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) - - params = get_params(container.chain[prediction_chain_start:end,:,:]) - W = params.β - if isa(W, Tuple) - W = reduce(hcat, W) - end - #z = params.α' .+ X * W' - z = X * W' + W = container.samples[:, prediction_chain_start:end] + z = X * W return vec(mean(exp.(z), dims=2)) end function predict(container::BayesianRegression{:PoissonRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) - - params = get_params(container.chain[prediction_chain_start:end,:,:]) - W = params.β - if isa(W, Tuple) - W = reduce(hcat, W) - end - #z = params.α' .+ X * W' - z = X * W' + W = container.samples[:, prediction_chain_start:end] + z = X * W return vec(mean(exp.(z), dims=2)) end \ No newline at end of file diff --git a/src/bayesian/linear_regression.jl b/src/bayesian/linear_regression.jl index 010b148..959bf7a 100644 --- a/src/bayesian/linear_regression.jl +++ b/src/bayesian/linear_regression.jl @@ -1,17 +1,47 @@ -function linear_reg(formula::FormulaTerm, data::DataFrame, turingModel::Function, sim_size::Int64) - formula = apply_schema(formula, schema(formula, data),RegressionModel) +function linear_reg(formula::FormulaTerm, data::DataFrame, turingModel::Function, algorithm::MCMC) + formula = apply_schema(formula, schema(formula, data), RegressionModel) y, X = modelcols(formula, data) - if 
sim_size < 500 + if algorithm.sim_size < 500 @warn "Simulation size should generally be atleast 500." end - chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:LinearRegression, chain, formula) + chain = sample(CRRao_rng, turingModel(X, y), NUTS(), algorithm.sim_size) + params = get_params(chain[algorithm.prediction_chain_start:end,:,:]) + samples = params.β + if isa(samples, Tuple) + samples = reduce(hcat, samples) + end + samples = samples' + + return BayesianRegression(:LinearRegression, samples, formula) +end + +function linear_reg(formula::FormulaTerm, data::DataFrame, turingModel::Function, algorithm::VI) + formula = apply_schema(formula, schema(formula, data), RegressionModel) + y, X = modelcols(formula, data) + + if algorithm.vi_max_iters < 500 + @warn "Max iterations should generally be atleast 500." + end + + model = turingModel(X, y) + dist = vi(model, ADVI(algorithm.vi_samples_per_step, algorithm.vi_max_iters)) + samples = rand(CRRao_rng, dist, algorithm.distribution_sample_count) + _, symbol_to_range = bijector(model, Val(true)) + samples = samples[union(symbol_to_range[:β]...), :] + return BayesianRegression(:LinearRegression, samples, formula) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Ridge, + algorithm::BayesianAlgorithm = MCMC(), + h::Float64 = 0.01, +) ``` Fit a Bayesian Linear Regression model on the input data with a Ridge prior. 
@@ -77,8 +107,8 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, + algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 0.01, - sim_size::Int64 = 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -97,12 +127,19 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Laplace, + algorithm::BayesianAlgorithm = MCMC(), + h::Float64 = 0.01, +) ``` Fit a Bayesian Linear Regression model on the input data with a Laplace prior. @@ -166,8 +203,8 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, + algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 0.01, - sim_size::Int64 = 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -185,12 +222,18 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Cauchy, + algorithm::BayesianAlgorithm = MCMC(), +) ``` Fit a Bayesian Linear Regression model on the input data with a Cauchy prior. 
@@ -253,7 +296,7 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, - sim_size::Int64 = 1000 + algorithm::BayesianAlgorithm = MCMC(), ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -268,12 +311,19 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_TDist, + algorithm::BayesianAlgorithm = MCMC(), + h::Float64 = 2.0, +) ``` Fit a Bayesian Linear Regression model on the input data with a t(ν) distributed prior. @@ -340,8 +390,8 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, + algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 2.0, - sim_size::Int64 = 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -359,13 +409,19 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end """ ```julia -fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_HorseShoe, + algorithm::BayesianAlgorithm = MCMC(), +) ``` Fit a Bayesian Linear Regression model on the input data with a HorseShoe prior. 
@@ -425,7 +481,7 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_HorseShoe, - sim_size::Int64 = 1000 + algorithm::BayesianAlgorithm = MCMC(), ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -446,12 +502,20 @@ function fit( y ~ MvNormal( X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss,alpha_prior_mean::Float64 = 0.0, beta_prior_mean::Float64, sim_size::Int64 = 1000, h::Float64 = 0.1) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + beta_prior_mean::Vector{Float64}, + algorithm::BayesianAlgorithm = MCMC(), +) ``` Fit a Bayesian Linear Regression model on the input data with a Gaussian prior with user specific prior mean for α and β. User doesnot have @@ -497,13 +561,13 @@ Quantiles ``` """ function fit( - formula::FormulaTerm - , data::DataFrame - , modelClass::LinearRegression - , prior::Prior_Gauss - , alpha_prior_mean::Float64 - , beta_prior_mean::Vector{Float64} - , sim_size::Int64 = 1000 + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + beta_prior_mean::Vector{Float64}, + algorithm::BayesianAlgorithm = MCMC(), ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -529,13 +593,23 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + 
modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + alpha_prior_sd::Float64, + beta_prior_mean::Vector{Float64}, + beta_prior_sd::Vector{Float64}, + algorithm::BayesianAlgorithm = MCMC(), +) ``` Fit a Bayesian Linear Regression model on the input data with a Gaussian prior with user specific prior mean and sd for α and β. @@ -580,15 +654,15 @@ Quantiles ``` """ function fit( - formula::FormulaTerm - , data::DataFrame - , modelClass::LinearRegression - , prior::Prior_Gauss - , alpha_prior_mean::Float64 - , alpha_prior_sd::Float64 - , beta_prior_mean::Vector{Float64} - , beta_prior_sd::Vector{Float64} - , sim_size::Int64 = 1000 + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + alpha_prior_sd::Float64, + beta_prior_mean::Vector{Float64}, + beta_prior_sd::Vector{Float64}, + algorithm::BayesianAlgorithm = MCMC(), ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -616,5 +690,5 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + return linear_reg(formula, data, LinearRegression, algorithm) end \ No newline at end of file diff --git a/src/bayesian/logistic_regression.jl b/src/bayesian/logistic_regression.jl index 4c7d83a..682717c 100644 --- a/src/bayesian/logistic_regression.jl +++ b/src/bayesian/logistic_regression.jl @@ -6,7 +6,14 @@ function logistic_reg(formula::FormulaTerm, data::DataFrame, Link::CRRaoLink, tu @warn "Simulation size should generally be atleast 500." 
end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:LogisticRegression, chain, formula, Link) + params = get_params(chain[:,:,:]) + samples = params.β + if isa(samples, Tuple) + samples = reduce(hcat, samples) + end + samples = samples' + + return BayesianRegression(:LogisticRegression, samples, formula, Link) end """ diff --git a/src/bayesian/negativebinomial_regression.jl b/src/bayesian/negativebinomial_regression.jl index e2f8a47..7070cee 100644 --- a/src/bayesian/negativebinomial_regression.jl +++ b/src/bayesian/negativebinomial_regression.jl @@ -11,7 +11,14 @@ function negativebinomial_reg( @warn "Simulation size should generally be atleast 500." end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:NegativeBinomialRegression, chain, formula) + params = get_params(chain[:,:,:]) + samples = params.β + if isa(samples, Tuple) + samples = reduce(hcat, samples) + end + samples = samples' + + return BayesianRegression(:NegativeBinomialRegression, samples, formula) end """ diff --git a/src/bayesian/poisson_regression.jl b/src/bayesian/poisson_regression.jl index 0653ea2..3f175cd 100644 --- a/src/bayesian/poisson_regression.jl +++ b/src/bayesian/poisson_regression.jl @@ -6,7 +6,14 @@ function poisson_reg(formula::FormulaTerm, data::DataFrame, turingModel::Functio @warn "Simulation size should generally be atleast 500." 
end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:PoissonRegression, chain, formula) + params = get_params(chain[:,:,:]) + samples = params.β + if isa(samples, Tuple) + samples = reduce(hcat, samples) + end + samples = samples' + + return BayesianRegression(:PoissonRegression, samples, formula) end """ diff --git a/src/fitmodel.jl b/src/fitmodel.jl index 046fa98..ec7b7a5 100644 --- a/src/fitmodel.jl +++ b/src/fitmodel.jl @@ -25,22 +25,22 @@ FrequentistRegression(RegressionType::Symbol, model, formula, link = GLM.Identit BayesianRegression{RegressionType} ``` -Type to represent bayesian regression models returned by `fit` functions. This type is used internally by the package to represent all bayesian regression models. `RegressionType` is a `Symbol` representing the model class. +Type to represent bayesian regression models (using MCMC) returned by `fit` functions. This type is used internally by the package to represent all bayesian regression models using MCMC. `RegressionType` is a `Symbol` representing the model class. """ struct BayesianRegression{RegressionType} <: RegressionModel - chain + samples formula::FormulaTerm link end """ ```julia -BayesianRegression(::Symbol, chain) +BayesianRegression(::Symbol, samples) ``` Constructor for `BayesianRegression`. `model` can be any regression model. Used by `fit` functions to return a bayesian regression model container. 
""" -BayesianRegression(RegressionType::Symbol, chain, formula, link = Identity()) = BayesianRegression{RegressionType}(chain, formula, link) +BayesianRegression(RegressionType::Symbol, samples, formula, link = Identity()) = BayesianRegression{RegressionType}(samples, formula, link) # Print Messages include("print.jl") diff --git a/src/print.jl b/src/print.jl index 5faa261..f216e5b 100644 --- a/src/print.jl +++ b/src/print.jl @@ -30,6 +30,6 @@ end function Base.show(io::IO, container::BayesianRegression) println(io, "Formula: ", container.formula) println(io, "Link: ", container.link) - print(io, "Chain: ") - show(io, MIME("text/plain"), container.chain) -end + print(io, "Samples: ") + show(io, MIME("text/plain"), container.samples) +end \ No newline at end of file diff --git a/test/numerical/bayesian/LinearRegression.jl b/test/numerical/bayesian/LinearRegression.jl index 3930434..339e3f3 100644 --- a/test/numerical/bayesian/LinearRegression.jl +++ b/test/numerical/bayesian/LinearRegression.jl @@ -9,15 +9,31 @@ tests = [ ] for (prior, test_mean) in tests + # MCMC CRRao.set_rng(StableRNG(123)) model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), prior) - prediction = predict(model, mtcars) - @test mean(prediction) - 2 * std(prediction) <= test_mean && test_mean <= mean(prediction) + 2 * std(prediction) + mcmc_prediction = predict(model, mtcars) + @test mean(mcmc_prediction) - 2 * std(mcmc_prediction) <= test_mean && test_mean <= mean(mcmc_prediction) + 2 * std(mcmc_prediction) + + # VI + CRRao.set_rng(StableRNG(123)) + model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), prior, VI()) + vi_prediction = predict(model, mtcars) + @test mean(vi_prediction) - 2 * std(vi_prediction) <= test_mean && test_mean <= mean(vi_prediction) + 2 * std(vi_prediction) + + @test maximum(abs.(mcmc_prediction - vi_prediction)) <= 5.0 end gauss_test = 20.0796026428345 CRRao.set_rng(StableRNG(123)) -model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, 
LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0], 1000) -prediction = predict(model, mtcars) -@test mean(prediction) - 2 * std(prediction) <= gauss_test && gauss_test <= mean(prediction) + 2 * std(prediction) +model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0]) +mcmc_prediction = predict(model, mtcars) +@test mean(mcmc_prediction) - 2 * std(mcmc_prediction) <= gauss_test && gauss_test <= mean(mcmc_prediction) + 2 * std(mcmc_prediction) + +CRRao.set_rng(StableRNG(123)) +model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0], VI()) +vi_prediction = predict(model, mtcars) +@test mean(vi_prediction) - 2 * std(vi_prediction) <= gauss_test && gauss_test <= mean(vi_prediction) + 2 * std(vi_prediction) + +@test maximum(abs.(mcmc_prediction - vi_prediction)) <= 5.0 \ No newline at end of file