Skip to content

Commit

Permalink
Merge pull request #154 from xKDR/add-vi
Browse files Browse the repository at this point in the history
Add VI for linear regression
  • Loading branch information
sourish-cmi authored Dec 18, 2024
2 parents 5f9e94f + f1947a3 commit c189f4a
Show file tree
Hide file tree
Showing 11 changed files with 232 additions and 97 deletions.
2 changes: 2 additions & 0 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ authors = ["xKDR Forum, Sourish Das"]
version = "0.1.1"

[deps]
AdvancedVI = "b5ca4192-6429-45e5-a2d9-87aec30a685c"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
Expand All @@ -19,6 +20,7 @@ StatsModels = "3eaba693-59b7-5ba5-a881-562e759f1c8d"
Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0"

[compat]
AdvancedVI = "0.2.11"
DataFrames = "1"
Distributions = "0.25"
Documenter = "0.27, 1"
Expand Down
22 changes: 15 additions & 7 deletions docs/src/api/bayesian_regression.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,34 +4,42 @@
BayesianRegression
```

## Bayesian Algorithms

```@docs
BayesianAlgorithm
MCMC
VI
```

## Linear Regression

### Linear Regression with User Specific Gaussian Prior
```@docs
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, beta_prior_mean::Vector{Float64}, sim_size::Int64 = 1000)
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, sim_size::Int64 = 1000)
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, beta_prior_mean::Vector{Float64}, algorithm::BayesianAlgorithm = MCMC())
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Gauss, alpha_prior_mean::Float64, alpha_prior_sd::Float64, beta_prior_mean::Vector{Float64}, beta_prior_sd::Vector{Float64}, algorithm::BayesianAlgorithm = MCMC())
```

### Linear Regression with Ridge Prior
```@docs
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, h::Float64 = 0.01, sim_size::Int64 = 1000)
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Ridge, algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 0.01)
```

### Linear Regression with Laplace Prior
```@docs
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, h::Float64 = 0.01, sim_size::Int64 = 1000)
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Laplace, algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 0.01)
```
### Linear Regression with Cauchy Prior
```@docs
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, sim_size::Int64 = 1000)
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_Cauchy, algorithm::BayesianAlgorithm = MCMC())
```
### Linear Regression with T-distributed Prior
```@docs
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, h::Float64 = 2.0, sim_size::Int64 = 1000)
fit(formula::FormulaTerm, data::DataFrame, modelClass::LinearRegression, prior::Prior_TDist, algorithm::BayesianAlgorithm = MCMC(), h::Float64 = 2.0)
```
### Linear Regression with Horse Shoe Prior
```@docs
fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,sim_size::Int64 = 1000)
fit(formula::FormulaTerm,data::DataFrame,modelClass::LinearRegression,prior::Prior_HorseShoe,algorithm::BayesianAlgorithm = MCMC())
```

## Logistic Regression
Expand Down
42 changes: 40 additions & 2 deletions src/CRRao.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ module CRRao

using DataFrames, GLM, Turing, StatsModels, StatsBase
using StatsBase, Distributions, LinearAlgebra
using Optim, NLSolversBase, Random, HypothesisTests
using Optim, NLSolversBase, Random, HypothesisTests, AdvancedVI
import StatsBase: coef, coeftable, r2, adjr2, loglikelihood, aic, bic, predict, residuals, cooksdistance, fit
import HypothesisTests: pvalue

Expand Down Expand Up @@ -392,9 +392,47 @@ end

Cauchit() = Cauchit(Cauchit_Link)

"""
```julia
BayesianAlgorithm
```
Abstract type representing bayesian algorithms which are used to dispatch to appropriate calls.
"""
abstract type BayesianAlgorithm end

"""
```julia
MCMC <: BayesianAlgorithm
```
A type representing MCMC algorithms.
"""
struct MCMC <: BayesianAlgorithm
sim_size::Int64
prediction_chain_start::Int64
end

MCMC() = MCMC(1000, 200)

"""
```julia
VI <: BayesianAlgorithm
```
A type representing variational inference algorithms.
"""
struct VI <: BayesianAlgorithm
distribution_sample_count::Int64
vi_max_iters::Int64
vi_samples_per_step::Int64
end

VI() = VI(1000, 10000, 100)

export LinearRegression, LogisticRegression, PoissonRegression, NegBinomRegression, Boot_Residual
export Prior_Ridge, Prior_Laplace, Prior_Cauchy, Prior_TDist, Prior_HorseShoe, Prior_Gauss
export CRRaoLink, Logit, Probit, Cloglog, Cauchit, fit
export CRRaoLink, Logit, Probit, Cloglog, Cauchit, fit, BayesianAlgorithm, MCMC, VI
export coef, coeftable, r2, adjr2, loglikelihood, aic, bic, sigma, predict, residuals, cooksdistance, BPTest, pvalue
export FrequentistRegression, BayesianRegression

Expand Down
42 changes: 9 additions & 33 deletions src/bayesian/getter.jl
Original file line number Diff line number Diff line change
@@ -1,51 +1,27 @@
"""
    predict(container::BayesianRegression{:LinearRegression}, newdata::DataFrame)

Predict the response for each row of `newdata` using the posterior samples
stored in `container`.

Builds the model matrix from the stored formula, forms one linear prediction
per posterior draw, and returns the posterior-mean prediction per observation.
Assumes `container.samples` is a coefficients × draws matrix — confirm against
the fitting code.
"""
function predict(container::BayesianRegression{:LinearRegression}, newdata::DataFrame)
    X = modelmatrix(container.formula, newdata)
    # Each column of `samples` is one posterior draw of the coefficient vector.
    W = container.samples
    predictions = X * W
    # Average over draws (columns) to get the posterior mean per observation.
    return vec(mean(predictions, dims=2))
end

"""
    predict(container::BayesianRegression{:LogisticRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200)

Predict the response probability for each row of `newdata` using the posterior
samples stored in `container`.

Draws before column `prediction_chain_start` are discarded (burn-in). The
linear predictor is mapped through the container's link function before
averaging over the remaining draws.
"""
function predict(container::BayesianRegression{:LogisticRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200)
    X = modelmatrix(container.formula, newdata)
    # Keep only post-burn-in draws (columns from `prediction_chain_start` on).
    W = container.samples[:, prediction_chain_start:end]
    z = X * W
    # Map the linear predictor to the probability scale via the link function.
    return vec(mean(container.link.link_function.(z), dims=2))
end

"""
    predict(container::BayesianRegression{:NegativeBinomialRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200)

Predict the mean response for each row of `newdata` using the posterior
samples stored in `container`.

Draws before column `prediction_chain_start` are discarded (burn-in). The
linear predictor is exponentiated (log link) before averaging over the
remaining draws.
"""
function predict(container::BayesianRegression{:NegativeBinomialRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200)
    X = modelmatrix(container.formula, newdata)
    # Keep only post-burn-in draws (columns from `prediction_chain_start` on).
    W = container.samples[:, prediction_chain_start:end]
    z = X * W
    # Inverse log link: exponentiate the linear predictor, then average draws.
    return vec(mean(exp.(z), dims=2))
end

"""
    predict(container::BayesianRegression{:PoissonRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200)

Predict the mean response for each row of `newdata` using the posterior
samples stored in `container`.

Draws before column `prediction_chain_start` are discarded (burn-in). The
linear predictor is exponentiated (log link) before averaging over the
remaining draws.
"""
function predict(container::BayesianRegression{:PoissonRegression}, newdata::DataFrame, prediction_chain_start::Int64 = 200)
    X = modelmatrix(container.formula, newdata)
    # Keep only post-burn-in draws (columns from `prediction_chain_start` on).
    W = container.samples[:, prediction_chain_start:end]
    z = X * W
    # Inverse log link: exponentiate the linear predictor, then average draws.
    return vec(mean(exp.(z), dims=2))
end
Loading

0 comments on commit c189f4a

Please sign in to comment.