Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name = "AdvancedMH"
uuid = "5b7e9947-ddc0-4b3f-9b55-0d8042f74170"
version = "0.8.9"
version = "0.8.10"

[deps]
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
Expand Down
69 changes: 65 additions & 4 deletions src/proposal.jl
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ end

function propose(
rng::Random.AbstractRNG,
proposal::Proposal{<:Function},
proposal::Proposal{<:Function},
model::DensityModelOrLogDensityModel,
t
)
Expand All @@ -129,24 +129,75 @@ end
# Multiple proposals
####################

"""
    proposal_dim(p::Proposal)

Return the number of scalar parameters produced by proposal `p`.

Users with custom proposal types should extend this function to enable
mixed-dimension proposal vectors.
"""
function proposal_dim end

# A proposal wrapping a univariate distribution draws exactly one scalar.
proposal_dim(::Proposal{<:UnivariateDistribution}) = 1

# Function-valued (conditional) proposals default to a single scalar;
# callables that produce more than one parameter should overload `proposal_dim`.
proposal_dim(::Proposal{<:Function}) = 1
Copy link
Copy Markdown
Member

@penelopeysm penelopeysm Feb 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The code in the PR all looks good, and I know I didn't say this in the original issue, but there are still quite a few more blockers to using this upstream in Turing. For example in this line which handles conditional proposals, are we implicitly assuming that the proposal is necessarily conditional on a single element?

Consider e.g.

@model f() = x ~ product_distribution([uni, uni])

where uni is some univariate distribution, and we want to propose new values of x based on the value at the previous step. Then we would need something like

StaticProposal(slice -> MvNormal(slice, I))

Or even something like a matrix distribution, which we would flatten inside an LDF. I don't immediately know how we can represent this in AdvancedMH where everything has to be flattened. I suppose that we would have to use something like a StaticProposal(vec(matrix_dist)).

Again, I don't think this has to be handled in this PR (if anything I actively think it shouldn't be). To be completely honest, I also don't know how to handle this, short of adding another field to StaticProposal that specifies the length. Just wanted to check that I'm understanding correctly

Copy link
Copy Markdown
Member

@penelopeysm penelopeysm Feb 6, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Basically the problem is, if I have a high-level construct like

spl = MH(
    @varname(x) => xprop,
    @varname(y) => yprop,
)

how do we translate that into AdvancedMH. From an LDF we can identify the range of indices that each varname belongs to, so as long as we can translate the individual prop's into AdvancedMH proposals, this PR will let us then stack them together, which is great.

But we still need to be able to translate the props and that can depend quite a bit on what prop is, what x's distribution in the model is, etc. For example if x ~ LKJCholesky() and we want to say 'sample x from the prior', that is quite easy to do in Turing because you have access to the distribution. But in AdvancedMH we would need to translate this into 'sample a vectorised form of x from a vectorised form of LKJCholesky'. Then it gets more complex if you have a conditional distribution etc.

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

these are tricky interface questions, we'll deal with them when they emerge

on the first comment, my cop out thought is that, if the user is serious about using this type of proposal, they can overload proposal_dim. this is not quite nice, but I am going to leave it there. that said, I added an error message

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

fwiw I'm happy to merge and release this!

# A multivariate distribution contributes one scalar per component.
proposal_dim(p::Proposal{<:MultivariateDistribution}) = length(p.proposal)

# An array of distributions: total the component dimensions, counting each
# univariate entry as a single scalar.
function proposal_dim(p::Proposal{<:AbstractArray})
    return sum(d isa UnivariateDistribution ? 1 : length(d) for d in p.proposal)
end

# Flatten per-proposal draws (a mix of scalars and vectors) into one flat
# parameter vector. `vcat` promotes a lone scalar draw to a 1-element vector,
# which the samplers rely on, so splatting is deliberate here.
_vcat_proposals(draws) = vcat(draws...)

"""
    _split_params(proposals::AbstractArray{<:Proposal}, params::AbstractVector)

Split a flat parameter vector `params` into chunks matching each proposal's
dimension, as determined by [`proposal_dim`](@ref). Single-dimension
proposals receive a scalar; multi-dimension proposals receive a sub-vector.

Throws a `DimensionMismatch` when the proposal dimensions do not add up to
`length(params)`.
"""
function _split_params(proposals::AbstractArray{<:Proposal}, params::AbstractVector)
    dims = map(proposal_dim, proposals)
    expected = sum(dims)
    if expected != length(params)
        throw(DimensionMismatch(
            "sum of proposal dimensions ($expected) does not match parameter length ($(length(params))). " *
            "For function-valued proposals, `proposal_dim` defaults to 1; override it for your callable type " *
            "to specify a different dimension."
        ))
    end
    # Element type must stay `Any`: chunks are a mix of scalars and vectors.
    chunks = Vector{Any}(undef, length(proposals))
    stop = 0
    for (i, dim) in enumerate(dims)
        start = stop + 1
        stop += dim
        # Scalar proposals get an unwrapped element, not a 1-element slice.
        chunks[i] = dim == 1 ? params[start] : params[start:stop]
    end
    return chunks
end

"""
    propose(rng, proposals::AbstractArray{<:Proposal}, model)

Draw from each proposal in `proposals` independently and concatenate the
draws into a single flat parameter vector.
"""
function propose(
    rng::Random.AbstractRNG,
    proposals::AbstractArray{<:Proposal},
    model::DensityModelOrLogDensityModel,
)
    draws = map(proposals) do proposal
        return propose(rng, proposal, model)
    end
    return _vcat_proposals(draws)
end

"""
    propose(rng, proposals::AbstractArray{<:Proposal}, model, ts::AbstractVector)

Draw from each proposal conditionally on the matching slice of the previous
flat parameter vector `ts`, then concatenate the draws back into one flat
parameter vector.
"""
function propose(
    rng::Random.AbstractRNG,
    proposals::AbstractArray{<:Proposal},
    model::DensityModelOrLogDensityModel,
    ts::AbstractVector,
)
    # Split the flat state so each proposal only sees its own parameters;
    # `_split_params` validates that dimensions add up to `length(ts)`.
    split_ts = _split_params(proposals, ts)
    draws = map(proposals, split_ts) do proposal, t
        return propose(rng, proposal, model, t)
    end
    return _vcat_proposals(draws)
end

@generated function propose(
Expand Down Expand Up @@ -232,6 +283,16 @@ function logratio_proposal_density(proposals::Tuple, states::Tuple, candidates::
return valfirst + valtail
end

# Log density ratio of a vector of proposals: split the flat state and
# candidate vectors into per-proposal chunks, then accumulate each
# proposal's contribution.
function logratio_proposal_density(
    proposals::AbstractArray{<:Proposal}, states::AbstractVector, candidates::AbstractVector
)
    state_chunks = _split_params(proposals, states)
    candidate_chunks = _split_params(proposals, candidates)
    return sum(
        logratio_proposal_density(p, s, c) for
        (p, s, c) in zip(proposals, state_chunks, candidate_chunks)
    )
end

# fallback for general iterators (arrays etc.) - possibly not type stable!
function logratio_proposal_density(proposals, states, candidates)
return sum(zip(proposals, states, candidates)) do (proposal, state, candidate)
Expand Down
46 changes: 46 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -294,6 +294,52 @@ include("util.jl")
end
end

@testset "Varying dimension proposals" begin
# Issue #120: proposals of varying dimensions can't be combined in a vector
# Each case below checks that per-proposal draws are flattened into a single
# parameter vector whose length is the sum of the proposal dimensions.

# Basic reproduce case from the issue: scalar + 2D multivariate
m1 = DensityModel(x -> x[1] + x[2] + x[3])
p1 = [StaticProposal(Normal()), RandomWalkProposal(MvNormal(zeros(2), I))]
chain1 = sample(m1, MetropolisHastings(p1), 10; chain_type=Any, progress=false)
@test length(chain1) == 10
@test chain1[1].params isa AbstractVector
@test length(chain1[1].params) == 3

# Reversed order: 2D multivariate first, then scalar
p2 = [StaticProposal(MvNormal(zeros(2), I)), RandomWalkProposal(Normal())]
chain2 = sample(m1, MetropolisHastings(p2), 10; chain_type=Any, progress=false)
@test length(chain2) == 10
@test length(chain2[1].params) == 3

# Multiple multivariate proposals: 2D + 3D
m3 = DensityModel(x -> sum(x))
p3 = [RandomWalkProposal(MvNormal(zeros(2), I)), RandomWalkProposal(MvNormal(zeros(3), I))]
chain3 = sample(m3, MetropolisHastings(p3), 10; chain_type=Any, progress=false)
@test length(chain3) == 10
@test length(chain3[1].params) == 5

# Backward compatibility: all scalar proposals still work
m4 = DensityModel(x -> x[1] + x[2])
p4 = [StaticProposal(Normal()), RandomWalkProposal(Normal())]
chain4 = sample(m4, MetropolisHastings(p4), 100; chain_type=Any, progress=false)
@test length(chain4) == 100
@test length(chain4[1].params) == 2

# With initial_params
# Note: initial_params is given in flat (already concatenated) form and must
# be preserved as the first state of the chain.
m5 = DensityModel(x -> x[1] + x[2] + x[3])
p5 = [StaticProposal(Normal()), RandomWalkProposal(MvNormal(zeros(2), I))]
chain5 = sample(m5, MetropolisHastings(p5), 10; chain_type=Any, initial_params=[1.0, 2.0, 3.0], progress=false)
@test chain5[1].params == [1.0, 2.0, 3.0]

# Single proposal in a vector should still produce a vector of params
# (guards against `reduce`-style flattening that would unwrap a lone scalar)
m6 = DensityModel(x -> x[1])
p6 = [StaticProposal(Normal())]
chain6 = sample(m6, MetropolisHastings(p6), 10; chain_type=Any, progress=false)
@test length(chain6) == 10
@test chain6[1].params isa AbstractVector
@test length(chain6[1].params) == 1
end

@testset "MALA" begin
@testset "basic" begin
# Set up the sampler.
Expand Down