-
Notifications
You must be signed in to change notification settings - Fork 33
Description
Hi,
I followed the tutorial to define a custom log-likelihood and pass it to bat_sample(). My likelihood seems to work fine; however, bat_sample() fails for me. After some time I get the error below:
AssertionError: length(chains) == nchains Stacktrace: [1] mcmc_init!(rng::Random123.Philox4x{UInt64, 10}, algorithm::MetropolisHastings{BAT.MvTDistProposal, RepetitionWeighting{Int64}, AdaptiveMHTuning}, density::PosteriorDensity{BAT.TransformedDensity{BAT.GenericDensity{var"#36#37"{Vector{Float64}, typeof(pdf)}}, BAT.DistributionTransform{ValueShapes.StructVariate{NamedTuple{(:offset, :resolution, :k)}}, BAT.InfiniteSpace, BAT.MixedSpace, BAT.StandardMvNormal{Float64}, NamedTupleDist{(:offset, :resolution, :k), Tuple{Product{Continuous, Uniform{Float64}, Vector{Uniform{Float64}}}, Uniform{Float64}, Uniform{Float64}}, Tuple{ValueAccessor{ArrayShape{Real, 1}}, ValueAccessor{ScalarShape{Real}}, ValueAccessor{ScalarShape{Real}}}}}, BAT.TDNoCorr}, BAT.DistributionDensity{BAT.StandardMvNormal{Float64}, BAT.HyperRectBounds{Float32}}, BAT.HyperRectBounds{Float32}, ArrayShape{Real, 1}}, nchains::Int64, init_alg::MCMCChainPoolInit, tuning_alg::AdaptiveMHTuning, nonzero_weights::Bool, callback::Function) @ BAT ~/.julia/packages/BAT/XvOy6/src/samplers/mcmc/chain_pool_init.jl:160
Any idea what could be the issue?
Reproducer:
using Random, LinearAlgebra, Statistics, Distributions, StatsBase, Plots
using BAT, IntervalSets, ValueShapes, TypedTables
import SpecialFunctions
using Profile, ProfileView, QuadGK
## Error function.
"""
    my_erf(x, coeff, base)

Scaled error-function edge: rises smoothly from `base` (as `x → -∞`) to
`coeff + base` (as `x → +∞`), passing through `coeff/2 + base` at `x == 0`.
Broadcasts over array-valued `x`.
"""
function my_erf(x,coeff,base)
# A stray markdown link pasted into this body was removed: it was
# unreachable (after `return`) and not valid Julia syntax.
return coeff/2*(1 .+ SpecialFunctions.erf.(x)) .+ base
end
## Step function.
"""
    my_step(x, coeff, base)

Sharp step centered at zero: `base` for `x < 0`, `coeff + base` for `x > 0`,
and the midpoint `coeff/2 + base` at `x == 0`. Broadcasts over array-valued `x`.
"""
function my_step(x,coeff,base)
# Same arithmetic as the smeared edge, with sign(x) in place of erf(x).
half_height = coeff/2
return half_height*(1 .+ sign.(x)) .+ base
end
## Model (sum of two errors or steps).
# NOTE(review): this function shadows `Base.count`; consider renaming it
# (e.g. `model_count`) to avoid confusion.
"""
    count(p::NamedTuple{(:offset, :resolution, :k)}, x)

Evaluate the two-edge model at `x`: two rising edges located at
`p.offset[1]` and `p.offset[2]` whose heights are coupled through `p.k`.
For `p.resolution` above a small threshold the edges are smeared with the
error function; otherwise sharp steps are used.
"""
function count(p::NamedTuple{(:offset, :resolution, :k)}, x)
# Edge heights trade off against each other via k (total height constant).
coeff1 = 6+p.k
coeff2 = 2-p.k
if p.resolution> 0.0000001
# Smeared edges: standardize x by sqrt(2)*resolution for the erf form.
scale = sqrt(2)*p.resolution
edge1 = my_erf((x .- p.offset[1])/scale,coeff1,4)
edge2 = my_erf((x .- p.offset[2])/scale,coeff2,0.)
return edge1+edge2
else
# Negligible resolution: fall back to sharp steps.
edge1 = my_step(x .- p.offset[1],coeff1,4)
edge2 = my_step(x .- p.offset[2],coeff2,0)
return edge1+edge2
end
end
## Integral of model
"""
    get_integral(p, low, high)

Numerically integrate `count(p, ·)` over `[low, high]` using adaptive
Gauss–Kronrod quadrature (`quadgk`). The quadrature error estimate is
discarded; only the integral value is returned.
"""
function get_integral(p::NamedTuple{(:offset, :resolution, :k)},low, high)
return first(quadgk(x -> count(p,x),low,high))
end
## PDF
# NOTE(review): this shadows `Distributions.pdf` when both are in scope;
# a distinct name (e.g. `model_pdf`) would be safer.
"""
    pdf(p, x, low, high)

Normalized model density at `x`: `count(p, x)` divided by its integral
over `[low, high]`.
"""
function pdf(p::NamedTuple{(:offset, :resolution, :k)},x,low, high)
normalization = get_integral(p,low,high)
return count(p,x)/normalization
end
## Sampler
"""
    sampler(p, n, low, high)

Draw `n` samples from the unnormalized model `count(p, ·)` on `[low, high]`
by rejection sampling. The envelope height is `count(p, high)`, i.e. it
assumes the model is maximal at `high` — TODO confirm this holds for every
parameter point of interest.
"""
function sampler(p::NamedTuple{(:offset, :resolution, :k)},n,low,high)
envelope = count(p,high)
samples = Array{Float64}(undef, n)
accepted = 0
while accepted < n
# Keep the RNG call order of the original: vertical draw, then candidate.
height = rand()*envelope
candidate = rand()*(high-low)+low
if height <= count(p, candidate)
accepted += 1
samples[accepted] = candidate
end
end
return samples
end
# Ground-truth parameters used to generate the toy data set.
true_par_values = (offset = [99, 150], resolution = 5, k = 0)
# 10k events drawn from the model on [0, 500] via rejection sampling.
arr = sampler(true_par_values,10000,0,500)
## Unbinned log-likelihood
# Sum of per-event log densities; `let` captures the data and pdf by value.
# NOTE(review): if `f(params, event, 0, 500)` ever returns a value <= 0
# (numerically possible near the prior edges, e.g. resolution ≈ 0),
# `log` yields -Inf/NaN, which could plausibly cause BAT's chain-pool
# init to fail with `length(chains) == nchains` — worth confirming.
likelihood = let data = arr, f =pdf
params -> begin
function event_log_likelihood(event)
log(f(params,event,0,500))
end
return LogDVal(mapreduce(event_log_likelihood, +, data))
end
end
## Flat priors
# Uniform priors over each model parameter; `offset` is a 2-vector.
prior = NamedTupleDist(
offset = [Uniform(50, 150), Uniform(80, 220)],
resolution = Uniform(0,20),
k = Uniform(-6,2)
)
## Posterior
posterior = PosteriorDensity(likelihood, prior)
## running bat_sample
# Metropolis-Hastings with 10^3 steps; `.result` extracts the sample table.
samples = bat_sample(posterior, MCMCSampling(mcalg = MetropolisHastings(), nsteps = 10^3)).result