From ee923e412c3cf6593a6cab9b014c327c0da2fa7a Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Thu, 2 Mar 2023 20:45:23 +0000
Subject: [PATCH] CompatHelper: bump compat for AbstractPPL to 0.6 for package test, (keep existing compat) (#469)

* Fixed a typo in tutorial (#451)

* CompatHelper: bump compat for Turing to 0.24 for package turing, (keep existing compat) (#450)

This pull request changes the compat entry for the `Turing` package from `0.21` to `0.21, 0.24` for package turing.
This keeps the compat entries for earlier versions.

Note: I have not tested your package with this new compat entry.
It is your responsibility to make sure that your package tests pass before you merge this pull request.

Co-authored-by: Hong Ge <3279477+yebai@users.noreply.github.com>

* Some minor utility improvements (#452)

This PR does the following:

- Moves `varname_leaves` from `TestUtils` to the main module.
  - It can be very useful in Turing.jl for constructing `Chains` and the like, so I think it's a good idea to make it part of the main module rather than keeping it "hidden" there.
- Makes the default `context` in the constructor of `LogDensityFunction` be `model.context` rather than a new `DynamicPPL.DefaultContext()`.
  - The `context` passed to `evaluate!!` overrides the leaf-context in `model.context`, so the old default constructor always used `DefaultContext` as the leaf-context, even if the `Model` had been `contextualize`d with some other leaf-context, e.g. `PriorContext`. This PR fixes that.

* Always run CI (#453)

I find the current `bors` workflow a bit tedious. Most of the time, I summon `bors` just to see the CI results (see e.g. https://github.com/TuringLang/DynamicPPL.jl/pull/438). Given that most `CI` tests are quick (< 10 mins), we can always run them by default. The most time-consuming `IntegrationTests` job is still run by `bors` to avoid excessive CI runs.

* Compat with new Bijectors.jl (#454)

This PR makes DPPL compatible with the changes to come in https://github.com/TuringLang/Bijectors.jl/pull/214. Tests are passing locally.

Closes https://github.com/TuringLang/DynamicPPL.jl/pull/455
Closes https://github.com/TuringLang/DynamicPPL.jl/pull/456

* Another Bijectors.jl compat bound bump (#457)

* CompatHelper: bump compat for MCMCChains to 6 for package test, (keep existing compat) (#467)

This pull request changes the compat entry for the `MCMCChains` package from `4.0.4, 5` to `4.0.4, 5, 6` for package test.
This keeps the compat entries for earlier versions.

Note: I have not tested your package with this new compat entry.
It is your responsibility to make sure that your package tests pass before you merge this pull request.
Co-authored-by: Hong Ge <3279477+yebai@users.noreply.github.com> * CompatHelper: bump compat for AbstractPPL to 0.6 for package test, (keep existing compat) --------- Co-authored-by: Hong Ge <3279477+yebai@users.noreply.github.com> Co-authored-by: github-actions[bot] Co-authored-by: Tor Erlend Fjelde --- .github/workflows/CI.yml | 3 ++ Project.toml | 4 +-- docs/src/tutorials/prob-interface.md | 4 +-- src/abstract_varinfo.jl | 9 +++--- src/logdensityfunction.jl | 10 ++++-- src/simple_varinfo.jl | 4 +-- src/test_utils.jl | 24 ++------------- src/utils.jl | 46 ++++++++++++++++++++++++++++ test/Project.toml | 8 ++--- test/simple_varinfo.jl | 3 ++ test/test_util.jl | 7 ++--- test/turing/Project.toml | 2 +- test/turing/compiler.jl | 2 +- 13 files changed, 81 insertions(+), 45 deletions(-) diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml index f1da30a13..04fbd2733 100644 --- a/.github/workflows/CI.yml +++ b/.github/workflows/CI.yml @@ -9,6 +9,9 @@ on: - trying # Build the master branch. - master + pull_request: + branches: + - master jobs: test: diff --git a/Project.toml b/Project.toml index a392f5e70..9e0813e22 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "DynamicPPL" uuid = "366bfd00-2699-11ea-058f-f148b4cae6d8" -version = "0.21.5" +version = "0.22.1" [deps] AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001" @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" AbstractMCMC = "2, 3.0, 4" AbstractPPL = "0.5.3, 0.6" BangBang = "0.3" -Bijectors = "0.5.2, 0.6, 0.7, 0.8, 0.9, 0.10" +Bijectors = "0.11, 0.12" ChainRulesCore = "0.9.7, 0.10, 1" ConstructionBase = "1" Distributions = "0.23.8, 0.24, 0.25" diff --git a/docs/src/tutorials/prob-interface.md b/docs/src/tutorials/prob-interface.md index 5ff85b996..9d3b5c6bd 100644 --- a/docs/src/tutorials/prob-interface.md +++ b/docs/src/tutorials/prob-interface.md @@ -20,7 +20,7 @@ end nothing # hide ``` -We generate some data using `μ = 0` and `σ = 1`: +We generate some data using `μ = 0`: ```@example probinterface Random.seed!(1776) @@ -35,7 +35,7 @@ Conditioning takes a variable and fixes its value as known. We do this by passing a model and a collection of conditioned variables to [`|`](@ref) or its alias [`condition`](@ref): ```@example probinterface -model = gdemo(length(dataset)) | (x=dataset, μ=0, σ=1) +model = gdemo(length(dataset)) | (x=dataset, μ=0) nothing # hide ``` diff --git a/src/abstract_varinfo.jl b/src/abstract_varinfo.jl index 8c1dd88d4..acd51e288 100644 --- a/src/abstract_varinfo.jl +++ b/src/abstract_varinfo.jl @@ -405,7 +405,7 @@ end # Vector-based ones. function link!!( - t::StaticTransformation{<:Bijectors.Bijector{1}}, + t::StaticTransformation{<:Bijectors.Transform}, vi::AbstractVarInfo, spl::AbstractSampler, model::Model, @@ -420,7 +420,7 @@ function link!!( end function invlink!!( - t::StaticTransformation{<:Bijectors.Bijector{1}}, + t::StaticTransformation{<:Bijectors.Transform}, vi::AbstractVarInfo, spl::AbstractSampler, model::Model, @@ -452,9 +452,8 @@ julia> using DynamicPPL, Distributions, Bijectors julia> @model demo() = x ~ Normal() demo (generic function with 2 methods) -julia> # By subtyping `Bijector{1}`, we inherit the `(inv)link!!` defined for - # bijectors which acts on 1-dimensional arrays, i.e. vectors. - struct MyBijector <: Bijectors.Bijector{1} end +julia> # By subtyping `Transform`, we inherit the `(inv)link!!`. + struct MyBijector <: Bijectors.Transform end julia> # Define some dummy `inverse` which will be used in the `link!!` call. 
Bijectors.inverse(f::MyBijector) = identity diff --git a/src/logdensityfunction.jl b/src/logdensityfunction.jl index 2c836ca07..007dfef11 100644 --- a/src/logdensityfunction.jl +++ b/src/logdensityfunction.jl @@ -10,7 +10,7 @@ $(FIELDS) ```jldoctest julia> using Distributions -julia> using DynamicPPL: LogDensityFunction +julia> using DynamicPPL: LogDensityFunction, contextualize julia> @model function demo(x) m ~ Normal() @@ -36,6 +36,12 @@ julia> # By default it uses `VarInfo` under the hood, but this is not necessary. julia> LogDensityProblems.logdensity(f, [0.0]) -2.3378770664093453 + +julia> # This also respects the context in `model`. + f_prior = LogDensityFunction(contextualize(model, DynamicPPL.PriorContext()), VarInfo(model)); + +julia> LogDensityProblems.logdensity(f_prior, [0.0]) == logpdf(Normal(), 0.0) +true ``` """ struct LogDensityFunction{V,M,C} @@ -60,7 +66,7 @@ end function LogDensityFunction( model::Model, varinfo::AbstractVarInfo=VarInfo(model), - context::AbstractContext=DefaultContext(), + context::AbstractContext=model.context, ) return LogDensityFunction(varinfo, model, context) end diff --git a/src/simple_varinfo.jl b/src/simple_varinfo.jl index 4b345a6ff..a445bf87a 100644 --- a/src/simple_varinfo.jl +++ b/src/simple_varinfo.jl @@ -648,7 +648,7 @@ Distributions.loglikelihood(model::Model, θ) = loglikelihood(model, SimpleVarIn # Allow usage of `NamedBijector` too. function link!!( - t::StaticTransformation{<:Bijectors.NamedBijector}, + t::StaticTransformation{<:Bijectors.NamedTransform}, vi::SimpleVarInfo{<:NamedTuple}, spl::AbstractSampler, model::Model, @@ -663,7 +663,7 @@ function link!!( end function invlink!!( - t::StaticTransformation{<:Bijectors.NamedBijector}, + t::StaticTransformation{<:Bijectors.NamedTransform}, vi::SimpleVarInfo{<:NamedTuple}, spl::AbstractSampler, model::Model, diff --git a/src/test_utils.jl b/src/test_utils.jl index 605952d88..45b9fff07 100644 --- a/src/test_utils.jl +++ b/src/test_utils.jl @@ -10,26 +10,8 @@ using Random: Random using Bijectors: Bijectors using Setfield: Setfield -""" - varname_leaves(vn::VarName, val) - -Return iterator over all varnames that are represented by `vn` on `val`, -e.g. `varname_leaves(@varname(x), rand(2))` results in an iterator over `[@varname(x[1]), @varname(x[2])]`. -""" -varname_leaves(vn::VarName, val::Real) = [vn] -function varname_leaves(vn::VarName, val::AbstractArray{<:Union{Real,Missing}}) - return ( - VarName(vn, DynamicPPL.getlens(vn) ∘ Setfield.IndexLens(Tuple(I))) for - I in CartesianIndices(val) - ) -end -function varname_leaves(vn::VarName, val::AbstractArray) - return Iterators.flatten( - varname_leaves( - VarName(vn, DynamicPPL.getlens(vn) ∘ Setfield.IndexLens(Tuple(I))), val[I] - ) for I in CartesianIndices(val) - ) -end +# For backwards compat. +using DynamicPPL: varname_leaves """ update_values!!(vi::AbstractVarInfo, vals::NamedTuple, vns) @@ -704,7 +686,7 @@ Simple model for which [`default_transformation`](@ref) returns a [`StaticTransf end function DynamicPPL.default_transformation(::Model{typeof(demo_static_transformation)}) - b = Bijectors.stack(Bijectors.Exp{0}(), Bijectors.Identity{0}()) + b = Bijectors.stack(Bijectors.elementwise(exp), identity) return DynamicPPL.StaticTransformation(b) end diff --git a/src/utils.jl b/src/utils.jl index 8f076efee..78595cb90 100644 --- a/src/utils.jl +++ b/src/utils.jl @@ -740,3 +740,49 @@ infer_nested_eltype(::Type{<:AbstractDict{<:Any,ET}}) where {ET} = infer_nested_ # No need + causes issues for some AD backends, e.g. Zygote. 
ChainRulesCore.@non_differentiable infer_nested_eltype(x) + +""" + varname_leaves(vn::VarName, val) + +Return an iterator over all varnames that are represented by `vn` on `val`. + +# Examples +```jldoctest +julia> using DynamicPPL: varname_leaves + +julia> foreach(println, varname_leaves(@varname(x), rand(2))) +x[1] +x[2] + +julia> foreach(println, varname_leaves(@varname(x[1:2]), rand(2))) +x[1:2][1] +x[1:2][2] + +julia> x = (y = 1, z = [[2.0], [3.0]]); + +julia> foreach(println, varname_leaves(@varname(x), x)) +x.y +x.z[1][1] +x.z[2][1] +``` +""" +varname_leaves(vn::VarName, ::Real) = [vn] +function varname_leaves(vn::VarName, val::AbstractArray{<:Union{Real,Missing}}) + return ( + VarName(vn, getlens(vn) ∘ Setfield.IndexLens(Tuple(I))) for + I in CartesianIndices(val) + ) +end +function varname_leaves(vn::VarName, val::AbstractArray) + return Iterators.flatten( + varname_leaves(VarName(vn, getlens(vn) ∘ Setfield.IndexLens(Tuple(I))), val[I]) for + I in CartesianIndices(val) + ) +end +function varname_leaves(vn::DynamicPPL.VarName, val::NamedTuple) + iter = Iterators.map(keys(val)) do sym + lens = Setfield.PropertyLens{sym}() + varname_leaves(vn ∘ lens, get(val, lens)) + end + return Iterators.flatten(iter) +end diff --git a/test/Project.toml b/test/Project.toml index 283b37fda..f8aa7038a 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -22,17 +22,17 @@ Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [compat] AbstractMCMC = "2.1, 3.0, 4" -AbstractPPL = "0.5.1, 0.6" -Bijectors = "0.9.5, 0.10" +AbstractPPL = "0.5, 0.6" +Bijectors = "0.11, 0.12" Distributions = "0.25" DistributionsAD = "0.6.3" Documenter = "0.26.1, 0.27" ForwardDiff = "0.10.12" LogDensityProblems = "2" -MCMCChains = "4.0.4, 5" +MCMCChains = "4.0.4, 5, 6" MacroTools = "0.5.5" Setfield = "0.7.1, 0.8, 1" StableRNGs = "1" -Tracker = "0.2.11" +Tracker = "0.2.23" Zygote = "0.5.4, 0.6" julia = "1.6" diff --git a/test/simple_varinfo.jl b/test/simple_varinfo.jl index 9a7f6e549..a5b57f5f6 100644 --- a/test/simple_varinfo.jl +++ b/test/simple_varinfo.jl @@ -64,6 +64,7 @@ @testset "$(typeof(vi))" for vi in ( SimpleVarInfo(Dict()), SimpleVarInfo(values_constrained), VarInfo(model) ) + vi = SimpleVarInfo(values_constrained) for vn in DynamicPPL.TestUtils.varnames(model) vi = DynamicPPL.setindex!!(vi, get(values_constrained, vn), vn) end @@ -108,6 +109,8 @@ @testset "SimpleVarInfo on $(nameof(model))" for model in DynamicPPL.TestUtils.DEMO_MODELS + model = DynamicPPL.TestUtils.demo_dot_assume_matrix_dot_observe_matrix() + # We might need to pre-allocate for the variable `m`, so we need # to see whether this is the case. svi_nt = SimpleVarInfo(rand(NamedTuple, model)) diff --git a/test/test_util.jl b/test/test_util.jl index 667412bca..f3e54c437 100644 --- a/test/test_util.jl +++ b/test/test_util.jl @@ -13,11 +13,8 @@ function test_model_ad(model, logp_manual) x = DynamicPPL.getall(vi) # Log probabilities using the model. - function logp_model(x) - new_vi = VarInfo(vi, SampleFromPrior(), x) - model(new_vi) - return getlogp(new_vi) - end + ℓ = DynamicPPL.LogDensityFunction(model, vi) + logp_model = Base.Fix1(LogDensityProblems.logdensity, ℓ) # Check that both functions return the same values. 
lp = logp_manual(x) diff --git a/test/turing/Project.toml b/test/turing/Project.toml index 26d34fb3d..5bda1e2dd 100644 --- a/test/turing/Project.toml +++ b/test/turing/Project.toml @@ -6,5 +6,5 @@ Turing = "fce5fe82-541a-59a6-adf8-730c64b5f9a0" [compat] DynamicPPL = "0.20, 0.21" -Turing = "0.21" +Turing = "0.21, 0.22, 0.23, 0.24" julia = "1.6" diff --git a/test/turing/compiler.jl b/test/turing/compiler.jl index 0b2b5362d..4a864a84f 100644 --- a/test/turing/compiler.jl +++ b/test/turing/compiler.jl @@ -70,7 +70,7 @@ x = Float64[1 2] @model function gauss(x) - priors = TArray{Float64}(2) + priors = Array{Float64}(undef, 2) priors[1] ~ InverseGamma(2, 3) # s priors[2] ~ Normal(0, sqrt(priors[1])) # m for i in 1:length(x)