Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Drop support for pre-1.10 #129

Merged
merged 34 commits into from
Dec 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
98dfa14
drop pre 1.10 in CI
Red-Portal Oct 8, 2024
82246ad
remove SimpleUnPack
Red-Portal Oct 8, 2024
0ee74f4
fix conditional testing on Enzyme and Mooncake
Red-Portal Oct 8, 2024
410f221
add missing import for Mooncake and Enzyme
Red-Portal Oct 8, 2024
32fc814
Merge branch 'master' of github.com:TuringLang/AdvancedVI.jl into dro…
Red-Portal Oct 8, 2024
006c7f1
fix remove unpack syntax left behind
Red-Portal Oct 8, 2024
1d292e5
fix remove remaining @unpack
Red-Portal Oct 9, 2024
3b61f1c
fix formatting
Red-Portal Oct 9, 2024
640e64c
fix wrong merge
Red-Portal Oct 9, 2024
1b9e29b
fix formatting
Red-Portal Oct 9, 2024
71cbd21
bump Documenter compat
Red-Portal Oct 9, 2024
008977f
disable Zygote for scoregradelbo bijectors
Red-Portal Oct 9, 2024
3f164dc
fix remove uses of `SimpleUnPack` in docs
Red-Portal Oct 21, 2024
dfdb59d
fix move footnote references from docstrings to the actual docs
Red-Portal Oct 21, 2024
5d7a258
remove redundant references in docstring
Red-Portal Oct 21, 2024
ee36b54
fix stop treating missing docs as error
Red-Portal Oct 21, 2024
8922a3d
Merge branch 'master' into drop_support_1.6_lts
yebai Oct 21, 2024
7859082
refactor testing on Enzyme is now its own workflow
Red-Portal Oct 21, 2024
0602f4e
add Enzyme workflow
Red-Portal Oct 21, 2024
3adb923
fix error in repgradelbo interface test
Red-Portal Oct 21, 2024
b92d382
fix name of Enzyme workflow
Red-Portal Oct 21, 2024
09b81ee
refactor test group organizations
Red-Portal Oct 21, 2024
f206f7b
fix error in inference tests
Red-Portal Oct 21, 2024
52bbbed
bump compat bound for `Optimisers` and `Functors`
Red-Portal Nov 9, 2024
466808d
fix relax exactness check for `rand!(LocationScale)`
Red-Portal Nov 9, 2024
9aa3621
fix weaken equality tests, update tolerance for `LocationScale` test
Red-Portal Nov 13, 2024
f754796
fix formatting
Red-Portal Nov 13, 2024
ecb4813
fix test on LTS instead of "1.10"
Red-Portal Nov 30, 2024
870778b
fix conditionally load Enzyme in tests
Red-Portal Nov 30, 2024
1efed26
bump setup-julia action version
Red-Portal Nov 30, 2024
0730cd9
fix remove Enzyme in non-Enzyme AD test
Red-Portal Dec 1, 2024
351b2ea
bump Julia, Bijectors, Functors, Optimisers version
Red-Portal Dec 3, 2024
2a3cfd4
Merge branch 'master' of github.com:TuringLang/AdvancedVI.jl into dro…
Red-Portal Dec 3, 2024
065d1e3
fix build error update setup-julia action for Enzyme tests
Red-Portal Dec 3, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ jobs:
fail-fast: false
matrix:
version:
- '1.7'
- '1.10'
- 'lts'
- '1'
os:
- ubuntu-latest
- macOS-latest
Expand All @@ -29,7 +29,7 @@ jobs:
- x64
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v1
- uses: julia-actions/setup-julia@v2
with:
version: ${{ matrix.version }}
arch: ${{ matrix.arch }}
Expand Down
40 changes: 40 additions & 0 deletions .github/workflows/Enzyme.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
name: Enzyme
on:
push:
branches:
- master
tags: ['*']
pull_request:
workflow_dispatch:
concurrency:
# Skip intermediate builds: always.
# Cancel intermediate builds: only if it is a pull request build.
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }}
jobs:
test:
name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }}
env:
TEST_GROUP: Enzyme
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
version:
- 'lts'
- '1'
os:
- ubuntu-latest
- macOS-latest
- windows-latest
arch:
- x64
steps:
- uses: actions/checkout@v4
- uses: julia-actions/setup-julia@v2
with:
version: ${{ matrix.version }}
arch: ${{ matrix.arch }}
- uses: julia-actions/cache@v1
- uses: julia-actions/julia-buildpkg@v1
- uses: julia-actions/julia-runtest@v1
14 changes: 4 additions & 10 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2"
ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca"
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
Requires = "ae029012-a4dd-5104-9daa-d747884805df"
SimpleUnPack = "ce78b400-467f-4804-87d8-8f486da07d0a"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"

[weakdeps]
Expand All @@ -36,7 +35,7 @@ AdvancedVIEnzymeExt = "Enzyme"
[compat]
ADTypes = "1"
Accessors = "0.1"
Bijectors = "0.13, 0.14"
Bijectors = "0.13, 0.14, 0.15"
ChainRulesCore = "1.16"
DiffResults = "1"
DifferentiationInterface = "0.6"
Expand All @@ -45,29 +44,24 @@ DocStringExtensions = "0.8, 0.9"
Enzyme = "0.13"
FillArrays = "1.3"
ForwardDiff = "0.10"
Functors = "0.4"
Functors = "0.4, 0.5"
LinearAlgebra = "1"
LogDensityProblems = "2"
Mooncake = "0.4"
Optimisers = "0.2.16, 0.3"
Optimisers = "0.2.16, 0.3, 0.4"
ProgressMeter = "1.6"
Random = "1"
Requires = "1.0"
ReverseDiff = "1"
SimpleUnPack = "1.1.0"
StatsBase = "0.32, 0.33, 0.34"
Zygote = "0.6"
julia = "1.7"
julia = "1.10, 1.11.2"

[extras]
Bijectors = "76274a88-744f-5084-9051-94815aaf08c4"
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
Mooncake = "da2b9cff-9c12-43a0-ae48-6db2b0edb7d6"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[targets]
test = ["Pkg", "Test"]
1 change: 0 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@ a `LogDensityProblem` can be implemented as

```julia
using LogDensityProblems
using SimpleUnPack

struct NormalLogNormal{MX,SX,MY,SY}
μ_x::MX
Expand Down
6 changes: 3 additions & 3 deletions bench/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"
[compat]
ADTypes = "1"
BenchmarkTools = "1"
Bijectors = "0.13, 0.14"
Bijectors = "0.13, 0.14, 0.15"
Distributions = "0.25.111"
DistributionsAD = "0.6"
Enzyme = "0.13.7"
Expand All @@ -30,10 +30,10 @@ ForwardDiff = "0.10"
InteractiveUtils = "1"
LogDensityProblems = "2"
Mooncake = "0.4.5"
Optimisers = "0.3"
Optimisers = "0.3, 0.4"
Random = "1"
ReverseDiff = "1"
SimpleUnPack = "1"
StableRNGs = "1"
Zygote = "0.6"
julia = "1.10"
julia = "1.10, 1.11.2"
8 changes: 3 additions & 5 deletions docs/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -11,22 +11,20 @@ Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
QuasiMonteCarlo = "8a4e6c94-4038-4cdc-81c3-7e6ffdb2a71b"
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
SimpleUnPack = "ce78b400-467f-4804-87d8-8f486da07d0a"
StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c"

[compat]
ADTypes = "1"
AdvancedVI = "0.3"
Bijectors = "0.13.6, 0.14"
Bijectors = "0.13.6, 0.14, 0.15"
Distributions = "0.25"
Documenter = "1"
FillArrays = "1"
ForwardDiff = "0.10"
LogDensityProblems = "2.1.1"
Optimisers = "0.3"
Optimisers = "0.3, 0.4"
Plots = "1"
QuasiMonteCarlo = "0.3"
ReverseDiff = "1"
SimpleUnPack = "1"
StatsFuns = "1"
julia = "1.10"
julia = "1.10, 1.11.2"
1 change: 1 addition & 0 deletions docs/make.jl
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ makedocs(;
"Variational Families" => "families.md",
"Optimization" => "optimization.md",
],
warnonly=[:missing_docs],
)

deploydocs(; repo="github.com/TuringLang/AdvancedVI.jl", push_preview=true)
9 changes: 4 additions & 5 deletions docs/src/elbo/repgradelbo.md
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,6 @@ using LinearAlgebra
using LogDensityProblems
using Plots
using Random
using SimpleUnPack

using Optimisers
using ADTypes, ForwardDiff
Expand All @@ -143,7 +142,7 @@ struct NormalLogNormal{MX,SX,MY,SY}
end

function LogDensityProblems.logdensity(model::NormalLogNormal, θ)
@unpack μ_x, σ_x, μ_y, Σ_y = model
(; μ_x, σ_x, μ_y, Σ_y) = model
logpdf(LogNormal(μ_x, σ_x), θ[1]) + logpdf(MvNormal(μ_y, Σ_y), θ[2:end])
end

Expand All @@ -168,7 +167,7 @@ L = Diagonal(ones(d));
q0 = AdvancedVI.MeanFieldGaussian(μ, L)

function Bijectors.bijector(model::NormalLogNormal)
@unpack μ_x, σ_x, μ_y, Σ_y = model
(; μ_x, σ_x, μ_y, Σ_y) = model
Bijectors.Stacked(
Bijectors.bijector.([LogNormal(μ_x, σ_x), MvNormal(μ_y, Σ_y)]),
[1:1, 2:1+length(μ_y)])
Expand Down Expand Up @@ -295,7 +294,7 @@ qmcrng = SobolSample(; R=OwenScramble(; base=2, pad=32))
function Distributions.rand(
rng::AbstractRNG, q::MvLocationScale{<:Diagonal,D,L}, num_samples::Int
) where {L,D}
@unpack location, scale, dist = q
(; location, scale, dist) = q
n_dims = length(location)
scale_diag = diag(scale)
unif_samples = QuasiMonteCarlo.sample(num_samples, length(q), qmcrng)
Expand Down Expand Up @@ -337,7 +336,7 @@ savefig("advi_qmc_dist.svg")
function Distributions.rand(
rng::AbstractRNG, q::MvLocationScale{<:Diagonal, D, L}, num_samples::Int
) where {L, D}
@unpack location, scale, dist = q
(; location, scale, dist) = q
n_dims = length(location)
scale_diag = diag(scale)
scale_diag.*rand(rng, dist, n_dims, num_samples) .+ location
Expand Down
5 changes: 2 additions & 3 deletions docs/src/examples.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ Using the `LogDensityProblems` interface, the model can be defined as follows

```@example elboexample
using LogDensityProblems
using SimpleUnPack

struct NormalLogNormal{MX,SX,MY,SY}
μ_x::MX
Expand All @@ -25,7 +24,7 @@ struct NormalLogNormal{MX,SX,MY,SY}
end

function LogDensityProblems.logdensity(model::NormalLogNormal, θ)
@unpack μ_x, σ_x, μ_y, Σ_y = model
(; μ_x, σ_x, μ_y, Σ_y) = model
return logpdf(LogNormal(μ_x, σ_x), θ[1]) + logpdf(MvNormal(μ_y, Σ_y), θ[2:end])
end

Expand Down Expand Up @@ -59,7 +58,7 @@ Thus, we will use [Bijectors](https://github.com/TuringLang/Bijectors.jl) to mat
using Bijectors

function Bijectors.bijector(model::NormalLogNormal)
@unpack μ_x, σ_x, μ_y, Σ_y = model
(; μ_x, σ_x, μ_y, Σ_y) = model
return Bijectors.Stacked(
Bijectors.bijector.([LogNormal(μ_x, σ_x), MvNormal(μ_y, Σ_y)]),
[1:1, 2:(1 + length(μ_y))],
Expand Down
2 changes: 2 additions & 0 deletions docs/src/optimization.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,3 +24,5 @@ PolynomialAveraging
```

[^DCAMHV2020]: Dhaka, A. K., Catalina, A., Andersen, M. R., Magnusson, M., Huggins, J., & Vehtari, A. (2020). Robust, accurate stochastic optimization for variational inference. Advances in Neural Information Processing Systems, 33, 10961-10973.
[^KMJ2024]: Khaled, A., Mishchenko, K., & Jin, C. (2023). DoWG unleashed: An efficient universal parameter-free gradient descent method. Advances in Neural Information Processing Systems, 36, 6748-6769.
[^IHC2023]: Ivgi, M., Hinder, O., & Carmon, Y. (2023). DoG is SGD's best friend: A parameter-free dynamic step size schedule. In International Conference on Machine Learning (pp. 14465-14499). PMLR.
1 change: 0 additions & 1 deletion src/AdvancedVI.jl
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@

module AdvancedVI

using SimpleUnPack: @unpack, @pack!
using Accessors

using Random
Expand Down
16 changes: 8 additions & 8 deletions src/families/location_scale.jl
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ function (re::RestructureMeanField)(flat::AbstractVector)
end

function Optimisers.destructure(q::MvLocationScale{<:Diagonal,D,L,E}) where {D,L,E}
@unpack location, scale, dist = q
(; location, scale, dist) = q
flat = vcat(location, diag(scale))
return flat, RestructureMeanField(q)
end
Expand All @@ -69,27 +69,27 @@ Base.size(q::MvLocationScale) = size(q.location)
Base.eltype(::Type{<:MvLocationScale{S,D,L,E}}) where {S,D,L,E} = eltype(D)

function StatsBase.entropy(q::MvLocationScale)
@unpack location, scale, dist = q
(; location, scale, dist) = q
n_dims = length(location)
# `convert` is necessary because `entropy` is not type stable upstream
return n_dims * convert(eltype(location), entropy(dist)) + logdet(scale)
end

function Distributions.logpdf(q::MvLocationScale, z::AbstractVector{<:Real})
@unpack location, scale, dist = q
(; location, scale, dist) = q
return sum(Base.Fix1(logpdf, dist), scale \ (z - location)) - logdet(scale)
end

function Distributions.rand(q::MvLocationScale)
@unpack location, scale, dist = q
(; location, scale, dist) = q
n_dims = length(location)
return scale * rand(dist, n_dims) + location
end

function Distributions.rand(
rng::AbstractRNG, q::MvLocationScale{S,D,L}, num_samples::Int
) where {S,D,L}
@unpack location, scale, dist = q
(; location, scale, dist) = q
n_dims = length(location)
return scale * rand(rng, dist, n_dims, num_samples) .+ location
end
Expand All @@ -98,7 +98,7 @@ end
function Distributions.rand(
rng::AbstractRNG, q::MvLocationScale{<:Diagonal,D,L}, num_samples::Int
) where {L,D}
@unpack location, scale, dist = q
(; location, scale, dist) = q
n_dims = length(location)
scale_diag = diag(scale)
return scale_diag .* rand(rng, dist, n_dims, num_samples) .+ location
Expand All @@ -107,14 +107,14 @@ end
function Distributions._rand!(
rng::AbstractRNG, q::MvLocationScale, x::AbstractVecOrMat{<:Real}
)
@unpack location, scale, dist = q
(; location, scale, dist) = q
rand!(rng, dist, x)
x[:] = scale * x
return x .+= location
end

function Distributions.mean(q::MvLocationScale)
@unpack location, scale = q
(; location, scale) = q
return location + scale * Fill(mean(q.dist), length(location))
end

Expand Down
16 changes: 8 additions & 8 deletions src/families/location_scale_low_rank.jl
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ Base.size(q::MvLocationScaleLowRank) = size(q.location)
Base.eltype(::Type{<:MvLocationScaleLowRank{L,SD,SF,D,E}}) where {L,SD,SF,D,E} = eltype(L)

function StatsBase.entropy(q::MvLocationScaleLowRank)
@unpack location, scale_diag, scale_factors, dist = q
(; location, scale_diag, scale_factors, dist) = q
n_dims = length(location)
scale_diag2 = scale_diag .* scale_diag
UtDinvU = Hermitian(scale_factors' * (scale_factors ./ scale_diag2))
Expand All @@ -63,7 +63,7 @@ end
function Distributions.logpdf(
q::MvLocationScaleLowRank, z::AbstractVector{<:Real}; non_differntiable::Bool=false
)
@unpack location, scale_diag, scale_factors, dist = q
(; location, scale_diag, scale_factors, dist) = q
μ_base = mean(dist)
n_dims = length(location)

Expand All @@ -86,7 +86,7 @@ function Distributions.logpdf(
end

function Distributions.rand(q::MvLocationScaleLowRank)
@unpack location, scale_diag, scale_factors, dist = q
(; location, scale_diag, scale_factors, dist) = q
n_dims = length(location)
n_factors = size(scale_factors, 2)
u_diag = rand(dist, n_dims)
Expand All @@ -97,7 +97,7 @@ end
function Distributions.rand(
rng::AbstractRNG, q::MvLocationScaleLowRank{S,D,L}, num_samples::Int
) where {S,D,L}
@unpack location, scale_diag, scale_factors, dist = q
(; location, scale_diag, scale_factors, dist) = q
n_dims = length(location)
n_factors = size(scale_factors, 2)
u_diag = rand(rng, dist, n_dims, num_samples)
Expand All @@ -108,7 +108,7 @@ end
function Distributions._rand!(
rng::AbstractRNG, q::MvLocationScaleLowRank, x::AbstractVecOrMat{<:Real}
)
@unpack location, scale_diag, scale_factors, dist = q
(; location, scale_diag, scale_factors, dist) = q

rand!(rng, dist, x)
x[:] = scale_diag .* x
Expand All @@ -120,22 +120,22 @@ function Distributions._rand!(
end

function Distributions.mean(q::MvLocationScaleLowRank)
@unpack location, scale_diag, scale_factors = q
(; location, scale_diag, scale_factors) = q
μ = mean(q.dist)
return location +
scale_diag .* Fill(μ, length(scale_diag)) +
scale_factors * Fill(μ, size(scale_factors, 2))
end

function Distributions.var(q::MvLocationScaleLowRank)
@unpack scale_diag, scale_factors = q
(; scale_diag, scale_factors) = q
σ2 = var(q.dist)
return σ2 *
(scale_diag .* scale_diag + sum(scale_factors .* scale_factors; dims=2)[:, 1])
end

function Distributions.cov(q::MvLocationScaleLowRank)
@unpack scale_diag, scale_factors = q
(; scale_diag, scale_factors) = q
σ2 = var(q.dist)
return σ2 * (Diagonal(scale_diag .* scale_diag) + scale_factors * scale_factors')
end
Expand Down
Loading
Loading