diff --git a/Project.toml b/Project.toml index 782217c..fd2dcec 100644 --- a/Project.toml +++ b/Project.toml @@ -4,6 +4,7 @@ authors = ["xKDR Forum, Sourish Das"] version = "0.1.1" [deps] +AdvancedVI = "b5ca4192-6429-45e5-a2d9-87aec30a685c" DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" @@ -19,6 +20,7 @@ StatsModels = "3eaba693-59b7-5ba5-a881-562e759f1c8d" Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0" [compat] +AdvancedVI = "0.2.11" DataFrames = "1" Distributions = "0.25" Documenter = "0.27, 1" diff --git a/src/CRRao.jl b/src/CRRao.jl index 59c062d..19b6c49 100644 --- a/src/CRRao.jl +++ b/src/CRRao.jl @@ -2,7 +2,7 @@ module CRRao using DataFrames, GLM, Turing, StatsModels, StatsBase using StatsBase, Distributions, LinearAlgebra -using Optim, NLSolversBase, Random, HypothesisTests +using Optim, NLSolversBase, Random, HypothesisTests, AdvancedVI import StatsBase: coef, coeftable, r2, adjr2, loglikelihood, aic, bic, predict, residuals, cooksdistance, fit import HypothesisTests: pvalue @@ -396,7 +396,7 @@ export LinearRegression, LogisticRegression, PoissonRegression, NegBinomRegressi export Prior_Ridge, Prior_Laplace, Prior_Cauchy, Prior_TDist, Prior_HorseShoe, Prior_Gauss export CRRaoLink, Logit, Probit, Cloglog, Cauchit, fit export coef, coeftable, r2, adjr2, loglikelihood, aic, bic, sigma, predict, residuals, cooksdistance, BPTest, pvalue -export FrequentistRegression, BayesianRegression +export FrequentistRegression, BayesianRegressionMCMC, BayesianRegressionVI include("random_number_generator.jl") include("general_stats.jl") diff --git a/src/bayesian/getter.jl b/src/bayesian/getter.jl index 5e748d3..9f0fc20 100644 --- a/src/bayesian/getter.jl +++ b/src/bayesian/getter.jl @@ -1,4 +1,4 @@ -function predict(container::BayesianRegression{:LinearRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) +function predict(container::BayesianRegressionMCMC{:LinearRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) params = get_params(container.chain[prediction_chain_start:end,:,:]) @@ -11,7 +11,16 @@ function predict(container::BayesianRegression{:LinearRegression}, newdata::Data return vec(mean(predictions, dims=2)) end -function predict(container::BayesianRegression{:LogisticRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) +function predict(container::BayesianRegressionVI{:LinearRegression}, newdata::DataFrame, number_of_samples::Int64 = 1000) + X = modelmatrix(container.formula, newdata) + + W = rand(CRRao_rng, container.dist, number_of_samples) + W = W[union(container.symbol_to_range[:β]...), :] + predictions = X * W + return vec(mean(predictions, dims=2)) +end + +function predict(container::BayesianRegressionMCMC{:LogisticRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) params = get_params(container.chain[prediction_chain_start:end,:,:]) @@ -24,7 +33,16 @@ function predict(container::BayesianRegression{:LogisticRegression}, newdata::Da return vec(mean(container.link.link_function.(z), dims=2)) end -function predict(container::BayesianRegression{:NegativeBinomialRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) +function predict(container::BayesianRegressionVI{:LogisticRegression}, newdata::DataFrame, number_of_samples::Int64 = 1000) + X = modelmatrix(container.formula, newdata) + + W = rand(CRRao_rng, 
container.dist, number_of_samples) + W = W[union(container.symbol_to_range[:β]...), :] + z = X * W + return vec(mean(container.link.link_function.(z), dims=2)) +end + +function predict(container::BayesianRegressionMCMC{:NegativeBinomialRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) params = get_params(container.chain[prediction_chain_start:end,:,:]) @@ -37,7 +55,7 @@ function predict(container::BayesianRegression{:NegativeBinomialRegression}, new return vec(mean(exp.(z), dims=2)) end -function predict(container::BayesianRegression{:PoissonRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) +function predict(container::BayesianRegressionMCMC{:PoissonRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200) X = modelmatrix(container.formula, newdata) params = get_params(container.chain[prediction_chain_start:end,:,:]) diff --git a/src/bayesian/linear_regression.jl b/src/bayesian/linear_regression.jl index 010b148..d50dc3e 100644 --- a/src/bayesian/linear_regression.jl +++ b/src/bayesian/linear_regression.jl @@ -1,4 +1,4 @@ -function linear_reg(formula::FormulaTerm, data::DataFrame, turingModel::Function, sim_size::Int64) +function linear_reg_mcmc(formula::FormulaTerm, data::DataFrame, turingModel::Function, sim_size::Int64) formula = apply_schema(formula, schema(formula, data),RegressionModel) y, X = modelcols(formula, data) @@ -6,12 +6,33 @@ function linear_reg(formula::FormulaTerm, data::DataFrame, turingModel::Function @warn "Simulation size should generally be atleast 500." end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:LinearRegression, chain, formula) + return BayesianRegressionMCMC(:LinearRegression, chain, formula) +end + +function linear_reg_vi(formula::FormulaTerm, data::DataFrame, turingModel::Function, max_iter::Int64) + formula = apply_schema(formula, schema(formula, data),RegressionModel) + y, X = modelcols(formula, data) + + if max_iter < 500 + @warn "Max iterations should generally be atleast 500." + end + model = turingModel(X, y) + dist = vi(model, ADVI(100, max_iter)) + _, symbol_to_range = bijector(model, Val(true)) + return BayesianRegressionVI(:LinearRegression, dist, formula, symbol_to_range) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Ridge, + use_vi::Bool = false, + h::Float64 = 0.01, + sim_size::Int64 = use_vi ? 20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a Ridge prior. @@ -77,8 +98,9 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, + use_vi::Bool = false, h::Float64 = 0.01, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 
20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -97,12 +119,24 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Laplace, + use_vi::Bool = false, + h::Float64 = 0.01, + sim_size::Int64 = use_vi ? 20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a Laplace prior. @@ -166,8 +200,9 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, + use_vi::Bool = false, h::Float64 = 0.01, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -185,12 +220,23 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Cauchy, + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a Cauchy prior. @@ -253,7 +299,8 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, - sim_size::Int64 = 1000 + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -268,12 +315,24 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_TDist, + use_vi::Bool = false, + h::Float64 = 2.0, + sim_size::Int64 = use_vi ? 20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a t(ν) distributed prior. @@ -340,8 +399,9 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, + use_vi::Bool = false, h::Float64 = 2.0, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -359,13 +419,24 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_HorseShoe, + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 
20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a HorseShoe prior. @@ -425,7 +496,8 @@ function fit( data::DataFrame, modelClass::LinearRegression, prior::Prior_HorseShoe, - sim_size::Int64 = 1000 + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -446,12 +518,25 @@ function fit( y ~ MvNormal( X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss,alpha_prior_mean::Float64 = 0.0, beta_prior_mean::Float64, sim_size::Int64 = 1000, h::Float64 = 0.1) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + beta_prior_mean::Vector{Float64}, + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a Gaussian prior with user specific prior mean for α and β. User doesnot have @@ -497,13 +582,14 @@ Quantiles ``` """ function fit( - formula::FormulaTerm - , data::DataFrame - , modelClass::LinearRegression - , prior::Prior_Gauss - , alpha_prior_mean::Float64 - , beta_prior_mean::Vector{Float64} - , sim_size::Int64 = 1000 + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + beta_prior_mean::Vector{Float64}, + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -529,13 +615,28 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + alpha_prior_sd::Float64, + beta_prior_mean::Vector{Float64}, + beta_prior_sd::Vector{Float64}, + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 20000 : 1000, +) ``` Fit a Bayesian Linear Regression model on the input data with a Gaussian prior with user specific prior mean and sd for α and β. @@ -580,15 +681,16 @@ Quantiles ``` """ function fit( - formula::FormulaTerm - , data::DataFrame - , modelClass::LinearRegression - , prior::Prior_Gauss - , alpha_prior_mean::Float64 - , alpha_prior_sd::Float64 - , beta_prior_mean::Vector{Float64} - , beta_prior_sd::Vector{Float64} - , sim_size::Int64 = 1000 + formula::FormulaTerm, + data::DataFrame, + modelClass::LinearRegression, + prior::Prior_Gauss, + alpha_prior_mean::Float64, + alpha_prior_sd::Float64, + beta_prior_mean::Vector{Float64}, + beta_prior_sd::Vector{Float64}, + use_vi::Bool = false, + sim_size::Int64 = use_vi ? 
20000 : 1000 ) @model LinearRegression(X, y) = begin p = size(X, 2) @@ -616,5 +718,9 @@ function fit( y ~ MvNormal(X * β, σ) end - return linear_reg(formula, data, LinearRegression, sim_size) + if use_vi + return linear_reg_vi(formula, data, LinearRegression, sim_size) + else + return linear_reg_mcmc(formula, data, LinearRegression, sim_size) + end end \ No newline at end of file diff --git a/src/bayesian/logistic_regression.jl b/src/bayesian/logistic_regression.jl index 4c7d83a..0853509 100644 --- a/src/bayesian/logistic_regression.jl +++ b/src/bayesian/logistic_regression.jl @@ -1,4 +1,4 @@ -function logistic_reg(formula::FormulaTerm, data::DataFrame, Link::CRRaoLink, turingModel::Function, sim_size::Int64) +function logistic_reg_mcmc(formula::FormulaTerm, data::DataFrame, Link::CRRaoLink, turingModel::Function, sim_size::Int64) formula = apply_schema(formula, schema(formula, data),RegressionModel) y, X = modelcols(formula, data) @@ -6,12 +6,34 @@ function logistic_reg(formula::FormulaTerm, data::DataFrame, Link::CRRaoLink, tu @warn "Simulation size should generally be atleast 500." end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:LogisticRegression, chain, formula, Link) + return BayesianRegressionMCMC(:LogisticRegression, chain, formula, Link) +end + +function logistic_reg_vi(formula::FormulaTerm, data::DataFrame, Link::CRRaoLink, turingModel::Function, max_iter::Int64) + formula = apply_schema(formula, schema(formula, data),RegressionModel) + y, X = modelcols(formula, data) + + if max_iter < 500 + @warn "Max iterations should generally be atleast 500." + end + model = turingModel(X, y) + dist = vi(model, ADVI(100, max_iter)) + _, symbol_to_range = bijector(model, Val(true)) + return BayesianRegressionVI(:LogisticRegression, dist, formula, symbol_to_range, Link) end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Ridge, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LogisticRegression, + Link::CRRaoLink, prior::Prior_Ridge, + use_vi::Bool = false, + h::Float64 = 0.1, + level::Float64 = 0.95, + sim_size::Int64 = use_vi ? 20000 : 1000 +) ``` Fit a Bayesian Logistic Regression model on the input data with a Ridge prior with the provided `Link` function. @@ -185,9 +207,10 @@ function fit( modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Ridge, + use_vi::Bool = false, h::Float64 = 0.1, level::Float64 = 0.95, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LogisticRegression(X, y) = begin p = size(X, 2) @@ -210,12 +233,26 @@ function fit( end end - return logistic_reg(formula, data, Link, LogisticRegression, sim_size) + if use_vi + return logistic_reg_vi(formula, data, Link, LogisticRegression, sim_size) + else + return logistic_reg_mcmc(formula, data, Link, LogisticRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Laplace, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LogisticRegression, + Link::CRRaoLink, + prior::Prior_Laplace, + use_vi::Bool = false, + h::Float64 = 0.1, + level::Float64 = 0.95, + sim_size::Int64 = use_vi ? 20000 : 1000 +) ``` Fit a Bayesian Logistic Regression model on the input data with a Laplace prior with the provided `Link` function. 
@@ -384,9 +421,10 @@ function fit( modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Laplace, + use_vi::Bool = false, h::Float64 = 0.1, level::Float64 = 0.95, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LogisticRegression(X, y) = begin p = size(X, 2) @@ -409,12 +447,26 @@ function fit( end end - return logistic_reg(formula, data, Link, LogisticRegression, sim_size) + if use_vi + return logistic_reg_vi(formula, data, Link, LogisticRegression, sim_size) + else + return logistic_reg_mcmc(formula, data, Link, LogisticRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Cauchy, h::Float64 = 0.1, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LogisticRegression, + Link::CRRaoLink, + prior::Prior_Cauchy, + use_vi::Bool = false, + h::Float64 = 0.1, + level::Float64 = 0.95, + sim_size::Int64 = use_vi ? 20000 : 1000 +) ``` Fit a Bayesian Logistic Regression model on the input data with a Cauchy prior with the provided `Link` function. @@ -578,9 +630,10 @@ function fit( modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_Cauchy, + use_vi::Bool = false, h::Float64 = 0.1, level::Float64 = 0.95, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LogisticRegression(X, y) = begin p = size(X, 2) @@ -603,12 +656,26 @@ function fit( end end - return logistic_reg(formula, data, Link, LogisticRegression, sim_size) + if use_vi + return logistic_reg_vi(formula, data, Link, LogisticRegression, sim_size) + else + return logistic_reg_mcmc(formula, data, Link, LogisticRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm, data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_TDist, h::Float64 = 1.0, level::Float64 = 0.95, sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LogisticRegression, + Link::CRRaoLink, + prior::Prior_TDist, + use_vi::Bool = false, + h::Float64 = 3.0, + level::Float64 = 0.95, + sim_size::Int64 = use_vi ? 20000 : 1000 +) ``` Fit a Bayesian Logistic Regression model on the input data with a T-Dist prior with the provided `Link` function. @@ -797,9 +864,10 @@ function fit( modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_TDist, + use_vi::Bool = false, h::Float64 = 3.0, level::Float64 = 0.95, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LogisticRegression(X, y) = begin p = size(X, 2) @@ -823,14 +891,27 @@ function fit( end end - return logistic_reg(formula, data, Link, LogisticRegression, sim_size) + if use_vi + return logistic_reg_vi(formula, data, Link, LogisticRegression, sim_size) + else + return logistic_reg_mcmc(formula, data, Link, LogisticRegression, sim_size) + end end """ ```julia -fit(formula::FormulaTerm,data::DataFrame,modelClass::LogisticRegression,Link::CRRaoLink,prior::Prior_HorseShoe,level::Float64 = 0.95,sim_size::Int64 = 1000) +fit( + formula::FormulaTerm, + data::DataFrame, + modelClass::LogisticRegression, + Link::CRRaoLink, + prior::Prior_HorseShoe, + use_vi::Bool = false, + level::Float64 = 0.95, + sim_size::Int64 = use_vi ? 20000 : 1000 +) ``` Fit a Bayesian Logistic Regression model on the input data with a HorseShoe prior with the provided `Link` function. 
@@ -1033,9 +1114,10 @@ function fit( data::DataFrame, modelClass::LogisticRegression, Link::CRRaoLink, prior::Prior_HorseShoe, + use_vi::Bool = false, level::Float64 = 0.95, - sim_size::Int64 = 1000 + sim_size::Int64 = use_vi ? 20000 : 1000 ) @model LogisticRegression(X, y) = begin p = size(X, 2) @@ -1069,5 +1151,9 @@ function fit( end end - return logistic_reg(formula, data, Link, LogisticRegression, sim_size) + if use_vi + return logistic_reg_vi(formula, data, Link, LogisticRegression, sim_size) + else + return logistic_reg_mcmc(formula, data, Link, LogisticRegression, sim_size) + end end \ No newline at end of file diff --git a/src/bayesian/negativebinomial_regression.jl b/src/bayesian/negativebinomial_regression.jl index e2f8a47..7e220c7 100644 --- a/src/bayesian/negativebinomial_regression.jl +++ b/src/bayesian/negativebinomial_regression.jl @@ -11,7 +11,7 @@ function negativebinomial_reg( @warn "Simulation size should generally be atleast 500." end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:NegativeBinomialRegression, chain, formula) + return BayesianRegressionMCMC(:NegativeBinomialRegression, chain, formula) end """ diff --git a/src/bayesian/poisson_regression.jl b/src/bayesian/poisson_regression.jl index 0653ea2..1bec20e 100644 --- a/src/bayesian/poisson_regression.jl +++ b/src/bayesian/poisson_regression.jl @@ -6,7 +6,7 @@ function poisson_reg(formula::FormulaTerm, data::DataFrame, turingModel::Functio @warn "Simulation size should generally be atleast 500." end chain = sample(CRRao_rng, turingModel(X, y), NUTS(), sim_size) - return BayesianRegression(:PoissonRegression, chain, formula) + return BayesianRegressionMCMC(:PoissonRegression, chain, formula) end """ diff --git a/src/fitmodel.jl b/src/fitmodel.jl index 046fa98..df45575 100644 --- a/src/fitmodel.jl +++ b/src/fitmodel.jl @@ -22,12 +22,12 @@ FrequentistRegression(RegressionType::Symbol, model, formula, link = GLM.Identit """ ```julia -BayesianRegression{RegressionType} +BayesianRegressionMCMC{RegressionType} ``` -Type to represent bayesian regression models returned by `fit` functions. This type is used internally by the package to represent all bayesian regression models. `RegressionType` is a `Symbol` representing the model class. +Type to represent bayesian regression models (using MCMC) returned by `fit` functions. This type is used internally by the package to represent all bayesian regression models using MCMC. `RegressionType` is a `Symbol` representing the model class. """ -struct BayesianRegression{RegressionType} <: RegressionModel +struct BayesianRegressionMCMC{RegressionType} <: RegressionModel chain formula::FormulaTerm link @@ -35,12 +35,35 @@ end """ ```julia -BayesianRegression(::Symbol, chain) +BayesianRegressionMCMC(::Symbol, chain) ``` -Constructor for `BayesianRegression`. `model` can be any regression model. Used by `fit` functions to return a bayesian regression model container. +Constructor for `BayesianRegressionMCMC`. `model` can be any regression model. Used by `fit` functions to return a bayesian regression model container. 
""" -BayesianRegression(RegressionType::Symbol, chain, formula, link = Identity()) = BayesianRegression{RegressionType}(chain, formula, link) +BayesianRegressionMCMC(RegressionType::Symbol, chain, formula, link = Identity()) = BayesianRegressionMCMC{RegressionType}(chain, formula, link) + +""" +```julia +BayesianRegressionVI{RegressionType} +``` + +Type to represent bayesian regression models (using VI) returned by `fit` functions. This type is used internally by the package to represent all bayesian regression models using VI. `RegressionType` is a `Symbol` representing the model class. +""" +struct BayesianRegressionVI{RegressionType} <: RegressionModel + dist + formula::FormulaTerm + symbol_to_range + link +end + +""" +```julia +BayesianRegressionVI(::Symbol, dist) +``` + +Constructor for `BayesianRegressionVI`. `model` can be any regression model. Used by `fit` functions to return a bayesian regression model container. +""" +BayesianRegressionVI(RegressionType::Symbol, dist, formula, symbol_to_range, link = Identity()) = BayesianRegressionVI{RegressionType}(dist, formula, symbol_to_range, link) # Print Messages include("print.jl") diff --git a/src/print.jl b/src/print.jl index 5faa261..2b57ed7 100644 --- a/src/print.jl +++ b/src/print.jl @@ -27,9 +27,16 @@ function Base.show(io::IO, container::FrequentistRegression{:PoissonRegression}) end # Bayesian Models -function Base.show(io::IO, container::BayesianRegression) +function Base.show(io::IO, container::BayesianRegressionMCMC) println(io, "Formula: ", container.formula) println(io, "Link: ", container.link) print(io, "Chain: ") show(io, MIME("text/plain"), container.chain) end + +function Base.show(io::IO, container::BayesianRegressionVI) + println(io, "Formula: ", container.formula) + println(io, "Link: ", container.link) + print(io, "Distribution: ") + show(io, MIME("text/plain"), container.dist) +end \ No newline at end of file diff --git a/test/numerical/bayesian/LinearRegression.jl b/test/numerical/bayesian/LinearRegression.jl index 3930434..f1285dc 100644 --- a/test/numerical/bayesian/LinearRegression.jl +++ b/test/numerical/bayesian/LinearRegression.jl @@ -9,15 +9,27 @@ tests = [ ] for (prior, test_mean) in tests + # MCMC CRRao.set_rng(StableRNG(123)) model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), prior) prediction = predict(model, mtcars) @test mean(prediction) - 2 * std(prediction) <= test_mean && test_mean <= mean(prediction) + 2 * std(prediction) + + # VI + CRRao.set_rng(StableRNG(123)) + model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), prior, true) + prediction = predict(model, mtcars) + @test mean(prediction) - 2 * std(prediction) <= test_mean && test_mean <= mean(prediction) + 2 * std(prediction) end gauss_test = 20.0796026428345 CRRao.set_rng(StableRNG(123)) -model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0], 1000) +model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0]) prediction = predict(model, mtcars) @test mean(prediction) - 2 * std(prediction) <= gauss_test && gauss_test <= mean(prediction) + 2 * std(prediction) + +CRRao.set_rng(StableRNG(123)) +model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Gauss(), 30.0, [0.0,-3.0,1.0], true) +prediction = predict(model, mtcars) +@test mean(prediction) - 2 * std(prediction) <= gauss_test && gauss_test <= mean(prediction) + 2 * std(prediction) \ No newline at end of file diff --git 
a/test/numerical/bayesian/LogisticRegression.jl b/test/numerical/bayesian/LogisticRegression.jl index 470d66b..1e42cc6 100644 --- a/test/numerical/bayesian/LogisticRegression.jl +++ b/test/numerical/bayesian/LogisticRegression.jl @@ -49,10 +49,19 @@ tests = [ ] for (prior, prior_testcases) in tests + # MCMC for (link, test_mean) in prior_testcases CRRao.set_rng(StableRNG(123)) model = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), link, prior) prediction = predict(model, turnout) @test mean(prediction) - 2 * std(prediction) <= test_mean && test_mean <= mean(prediction) + 2 * std(prediction) end + + # VI + for (link, test_mean) in prior_testcases + CRRao.set_rng(StableRNG(123)) + model = fit(@formula(Vote ~ Age + Race + Income + Educate), turnout, LogisticRegression(), link, prior, true) + prediction = predict(model, turnout) + @test mean(prediction) - 2 * std(prediction) <= test_mean && test_mean <= mean(prediction) + 2 * std(prediction) + end end \ No newline at end of file
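
Reviewer note: a minimal usage sketch of the VI path this patch adds (not part of the patch itself). It assumes `mtcars` is loaded via RDatasets, as in the test suite; passing `true` for the new `use_vi` positional argument switches `fit` from NUTS sampling to ADVI, and `sim_size` is then interpreted as the maximum number of ADVI iterations.

```julia
using CRRao, RDatasets, StableRNGs, StatsModels

# Same dataset the numerical tests use.
mtcars = dataset("datasets", "mtcars")

# Existing MCMC path (default): NUTS with `sim_size` draws.
CRRao.set_rng(StableRNG(123))
mcmc_model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Ridge())

# New VI path: `use_vi = true` returns a BayesianRegressionVI container.
CRRao.set_rng(StableRNG(123))
vi_model = fit(@formula(MPG ~ HP + WT + Gear), mtcars, LinearRegression(), Prior_Ridge(), true)

# `predict` dispatches on the container type and samples the fitted
# variational distribution to form posterior-mean predictions.
predict(vi_model, mtcars)
```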