diff --git a/__marimo__/session/marimo_test.py.json b/__marimo__/session/marimo_test.py.json
new file mode 100644
index 000000000..cd4bb431a
--- /dev/null
+++ b/__marimo__/session/marimo_test.py.json
@@ -0,0 +1,28 @@
+{
+ "version": "1",
+ "metadata": {
+ "marimo_version": "0.18.3"
+ },
+ "cells": [
+ {
+ "id": "Hbol",
+ "code_hash": "072b56a0ec87f792811305870b9d8722",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/vnd.marimo+mimebundle": {}
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ }
+ ]
+}
\ No newline at end of file
diff --git a/__marimo__/session/marimo_tutorial.py.json b/__marimo__/session/marimo_tutorial.py.json
new file mode 100644
index 000000000..5f027a175
--- /dev/null
+++ b/__marimo__/session/marimo_tutorial.py.json
@@ -0,0 +1,7 @@
+{
+ "version": "1",
+ "metadata": {
+ "marimo_version": "0.18.3"
+ },
+ "cells": []
+}
\ No newline at end of file
diff --git a/docs/tutorials/__marimo__/session/marimo_tutorial.py.json b/docs/tutorials/__marimo__/session/marimo_tutorial.py.json
new file mode 100644
index 000000000..fb810ad7c
--- /dev/null
+++ b/docs/tutorials/__marimo__/session/marimo_tutorial.py.json
@@ -0,0 +1,3703 @@
+{
+ "version": "1",
+ "metadata": {
+ "marimo_version": "0.18.3"
+ },
+ "cells": [
+ {
+ "id": "MJUe",
+ "code_hash": "b744b10707e1ed1f83d64011cbfce178",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "vblA",
+ "code_hash": "3cd798fdba347ae205aec504e4b1fd9d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "This tutorial provides a comprehensive introduction to the HSSM package for Hierarchical Bayesian Estimation of Sequential Sampling Models. \nTo make the most of the tutorial, let us cover the functionality of the key supporting packages that we use along the way. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "bkHC",
+ "code_hash": "b3a5ab143bbb83c945bef42d4be18a82",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Colab Instructions \nIf you would like to run this tutorial on Google colab, please click this link . \nOnce you are in the colab , follow the installation instructions below and then restart your runtime . \nJust uncomment the code in the next code cell and run it! \nNOTE : \nYou may want to switch your runtime to have a GPU or TPU. To do so, go to Runtime > Change runtime type and select the desired hardware accelerator. \nNote that if you switch your runtime you have to follow the installation instructions again. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "PKri",
+ "code_hash": "e06d4a677f503002c7f346d04e8f4c6b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Basic Imports "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "SFPL",
+ "code_hash": "a60459dd15e9e437992b6a6f05d3bd2d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Data Simulation \nWe will rely on the ssms package for data simulation repeatedly. Let's look at a basic isolated use case below. \nAs an example, let's use ssms to simulate from the basic Drift Diffusion Model (a running example in this tutorial). \n \n\nIf you are not familiar with the DDM. For now just consider that it has four parameters. \n\nv the drift rate \na the boundary separation \nt the non-decision time \nz the a priori decision bias (starting point) \n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "BYtC",
+ "code_hash": "2b5981acde95188f05d550d649a6a688",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Using simulate_data() "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "RGSE",
+ "code_hash": "526250fe8eef7597644f5dc4bf95cf88",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "HSSM comes with a basic simulator function supplied the simulate_data() function. We can use this function to create synthetic datasets. \nBelow we show the most basic usecase: \nWe wish to generate 500 datapoints (trials) from the standard Drift Diffusion Model with a fixed parameters, v = 0.5, a = 1.5, z = 0.5, t = 0.5. \nNote : \nIn the course of the tutorial, we will see multiple strategies for synthetic dataset generation, this being the most straightforward one. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "emfo",
+ "code_hash": "03b076b1557a532e2554298fd029c461",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "If instead you wish to supply a parameter that varies by trial (a lot more on this later), you can simply supply a vector of parameters to the theta dictionary, when calling the simulator. \nNote : \nThe size argument conceptually functions as number of synthetic datasets . So if you supply a parameter as a (1000,) vector, then the simulator assumes that one dataset consists of 1000 trials, hence if we set the size = 1 as below, we expect in return a dataset with 1000 trials. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "nWHF",
+ "code_hash": "cbe65eea932071f0de552826df9ccb81",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "If we wish to simulate from another model, we can do so by changing the model string. \nThe number of models we can simulate differs from the number of models for which we have likelihoods available (both will increase over time). To get the models for which likelihood functions are supplied out of the box, we should inspect hssm.HSSM.supported_models. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZHCJ",
+ "code_hash": "c5315e08cf739fa8498b5d19d651e7af",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "If we wish to check more detailed information about a given supported model, we can use the accessor get_default_model_config under hssm.modelconfig. For example, we inspect ddm model configuration below. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "qnkX",
+ "code_hash": "0450c543509ca2b1c7ceeca19767df9c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "This dictionary contains quite a bit of information. For purposes of simulating data from a given model , we will highlight two aspects: \n\nThe key list_of_params provides us with the necessary information to define out theta dictionary \nThe bounds key inside the likelihoods sub-dictionaries, provides us with an indication of reasonable parameter values. \n \nThe likelihoods dictionary inhabits three sub-directories for the ddm model, since we have all three, an analytical, an approx_differentiable (LAN) and a blackbox likelihood available. For many models, we will be able to access only one or two types of likelihoods. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "TqIu",
+ "code_hash": "2207d2a3fdc9f5bf21716b6c5d28bbe4",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Using ssm-simulators \nInternally, HSSM natively makes use of the ssm-simulators package for forward simulation of models.\nhssm.simulate_data() functions essentially as a convenience-wrapper. \nBelow we illustrate how to simulate data using the ssm-simulators package directly, to generate an equivalent dataset as created above. We will use the third way of passing parameters to the simulator, which is as a parameter-matrix . \nNotes : \n\nIf you pass parameters as a parameter matrix, make sure the column ordering is correct. You can follow the parameter ordering under hssm.defaults.default_model_config['ddm']['list_params']. \nThis is a minimal example, for more information about the package, check the associated github-page . \n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "DnEU",
+ "code_hash": "13bb70689fdcef156be3630c96d4efe8",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We will stick to hssm.simulate_data() in this tutorial, to keep things simple. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ulZA",
+ "code_hash": "560a650e2bb9f77dd770f10b58b77ff6",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "ArviZ for Plotting \n \n\nWe use the ArviZ package for most of our plotting needs.\nArviZ is a useful aid for plotting when doing anything Bayesian. \nIt works with HSSM out of the box, by virtue of HSSMs reliance on PyMC for model construction and sampling. \nChecking out the ArviZ Documentation is a good idea to give you communication superpowers for not only your HSSM results, but also other libraries in the Bayesian Toolkit such as NumPyro or STAN . \nWe will see ArviZ plots throughout the notebook. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ecfG",
+ "code_hash": "676d2f2cecc3125737e9150b214d3e61",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Main Tutorial "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Pvdt",
+ "code_hash": "326e6372f09a3b87d8e9d80a400397e4",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Initial Dataset "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZBYS",
+ "code_hash": "83705df43e705f57f1ccd2dad5b0e0cb",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's proceed to simulate a simple dataset for our first example. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "nHfw",
+ "code_hash": "7716a09eeeeb7f64bea5317e4f85d081",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "First HSSM Model \nIn this example we will use the analytical likelihood function computed as suggested in this paper . "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "xXTn",
+ "code_hash": "a74617ba3519fe82610150dc4a4d1d7c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Instantiate the model \nTo instantiate our HSSM class, in the simplest version, we only need to provide an appropriate dataset.\nThe dataset is expected to be a pandas.DataFrame with at least two columns, respectively called rt (for reaction time) and response.\nOur data simulated above is already in the correct format, so let us try to construct the class. \nNOTE: \nIf you are a user of the HDDM python package, this workflow should seem very familiar. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "NCOB",
+ "code_hash": "6de2f8fd488d9a327179cfc46e122a44",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "The print() function gives us some basic information about our model including the number of observations the parameters in the model and their respective prior setting . We can also create a nice little graphical representation of our model... "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "aqbW",
+ "code_hash": "1d8ac96f6273e8d6c9371ff5e88bdca8",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Model Graph \nSince HSSM creates a PyMC Model, we can can use the .graph() function, to get a graphical representation of the the model we created. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "TXez",
+ "code_hash": "038a76152d71d7a289ba475a1a383ed8",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "This is the simplest model we can build. The graph above follows plate notation , commonly used for probabilistic graphical models . \n\nWe have our basic parameters (unobserved, white nodes ), these are random variables in the model and we want to estimate them \nOur observed reaction times and choices (SSMRandomVariable, grey node ), are fixed (or conditioned on). \nRounded rectangles provide us with information about dimensionality of objects \nRectangles with sharp edges represent deterministic , but computed quantities (not shown here, but in later models) \n \nThis notation is helpful to get a quick overview of the structure of a given model we construct. \nThe graph() function of course becomes a lot more interesting and useful for more complicated models! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "dNNg",
+ "code_hash": "35949bc382a80073dbac384bb0f2f1ed",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Sample from the Model \nWe can now call the .sample() function, to get posterior samples. The main arguments you may want to change are listed in the function call below. \nImportantly, multiple backends are possible. We choose the nuts_numpyro backend below,\nwhich in turn compiles the model to a JAX function. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "wlCL",
+ "code_hash": "60d9155303f8a4a929a0bdc9d386fcd5",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We sampled from the model, let's look at the output... "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "wAgl",
+ "code_hash": "f21bd128060406d30020b18987768785",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Errr... a closer look might be needed here! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "rEll",
+ "code_hash": "36097423dad9390a1c83a90d4eecb55b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Inference Data / What gets returned from the sampler? \nThe sampler returns an ArviZ InferenceData object. \nTo understand all the logic behind these objects and how they mesh with the Bayesian Workflow, we refer you to the ArviZ Documentation . \nInferenceData is build on top of xarrays . The xarray documentation will help you understand in more detail how to manipulate these objects. \nBut let's take a quick high-level look to understand roughly what we are dealing with here! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "SdmI",
+ "code_hash": "4f864b3f48fe545119c7e57b758f2a31",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We see that in our case, infer_data_simple_ddm_model contains four basic types of data (note: this is extensible!) \n\nposterior \nlog_likelihood \nsample_stats \nobserved_data \n \nThe posterior object contains our traces for each of the parameters in the model. The log_likelihood field contains the trial wise log-likelihoods for each sample from the posterior. The sample_stats field contains information about the sampler run. This can be important for chain diagnostics, but we will not dwell on this here. Finally we retreive our observed_data. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "lgWD",
+ "code_hash": "76cc86595fd74b5620c8150754f22ff0",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Basic Manipulation "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "yOPj",
+ "code_hash": "dc11ff09e8ffab08c1699e523e9352a7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Accessing groups and variables "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "urSm",
+ "code_hash": "8dd1ee80a93ba57f7d943db89526e510",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "To simply access the underlying data as a numpy.ndarray, we can use .values (as e.g. when using pandas.DataFrame objects). "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "CcZR",
+ "code_hash": "08adb26fc6de99adb0e98e9c2d9b9b5a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Combine chain and draw dimension \nWhen operating directly on the xarray, you will often find it useful to collapse the chain and draw coordinates into a single coordinate.\nArviz makes this easy via the extract method. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zlud",
+ "code_hash": "a51b86ffa2e1fa2e5f72dc23d029fc6b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Since Arviz really just calls the .stack() method from xarray , here the corresponding example using the lower level xarray interface. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "xvXZ",
+ "code_hash": "e2c1b8fa553d58be239622b927c1f1ba",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Making use of ArviZ \nWorking with the InferenceData directly, is very helpful if you want to include custom computations into your workflow.\nFor a basic Bayesian Workflow however, you will often find that standard functionality available through ArviZ \nsuffices. \nBelow we provide a few examples of useful Arviz outputs, which come handy for analyzing your traces (MCMC samples). \nSummary table \nLet's take a look at a summary table for our posterior. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "YECM",
+ "code_hash": "14e3dc1d9c4ae7dcf4dbadd761eb2d36",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "This table returns the parameter-wise mean of our posterior and a few extra statistics. \nOf these extra statistics, the one-stop shop for flagging convergence issues is the r_hat value, which\nis reported in the right-most column. \nTo navigate this statistic, here is a rule of thumb widely used in applied Bayesian statistics. \nIf you find an r_hat value > 1.01, it warrants investigation. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "cEAS",
+ "code_hash": "5c1969d0dc408f1a49f5df69614126b9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Trace plot "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "EJmg",
+ "code_hash": "168a45c60e578d2fa5d2bb335ad1ef68",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "The .sample() function also sets a trace attribute, on our hssm class, so instead, we could call the plot like so: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "vEBW",
+ "code_hash": "a55bb3b77fd95bab7afeec895046a7c4",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "In this tutorial we are most often going to use the latter way of accessing the traces, but there is no preferred option. \nLet's look at a few more plots. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "kLmu",
+ "code_hash": "44953874e98f6450a494ec7a35106635",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Forest Plot \nThe forest plot is commonly used for a quick visual check of the marginal posteriors. It is very effective for intuitive communication of results. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "dxZZ",
+ "code_hash": "272311154b8cd53b4b850d65538469e9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Combining Chains \nBy default, chains are separated out into separate caterpillars , however\nsometimes, especially if you are looking at a forest plot which includes many posterior parameters at once, you want to declutter and collapse the chains into single caterpillars.\nIn this case you can combine chains instead. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "TTti",
+ "code_hash": "d12c299b9c1865806f6f2709ec9f863d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Basic Marginal Posterior Plot \nAnother way to view the marginal posteriors is provided by the plot_posterior() function. It shows the mean and by default the ||(94\\%||) HDIs. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IaQp",
+ "code_hash": "98cb626c91c7374507d0c819132efd3c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Especially for parameter recovery studies, you may want to include reference values for the parameters of interest. \nYou can do so using the ref_val argument. See the example below: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "fCoF",
+ "code_hash": "92822dde383e897b13b183185f0a72f3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Since it is sometimes useful, especially for more complex cases, below an alternative approach in which we pass ref_val as a dictionary. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zVRe",
+ "code_hash": "8e7d34d04503fbe7583163607bcba304",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Posterior Pair Plot "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "woaO",
+ "code_hash": "0b01b4510c5ccbef6c2874cfcd51f6ad",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "The posterior pair plot show us bi-variate traceplots and is useful to check for simple parameter tradeoffs that may emerge. The simplest (linear) tradeoff may be a high correlation between two parameters.\nThis can be very helpful in diagnosing sampler issues for example. If such tradeoffs exist, one often see extremely wide marginal distributions . \nIn our ddm example, we see a little bit of a tradeoff between a and t, as well as between v and z, however nothing concerning. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "wadT",
+ "code_hash": "26c25e87ada51e6de06fa9d5a616a938",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "The few plot we showed here are just the beginning: ArviZ has a much broader spectrum of graphs and other convenience function available. Just check the documentation . "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "VCRE",
+ "code_hash": "d8fdf9a76132d9cfce86594a906398cc",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Compute Quantities from idata \nExample: Mean and Covariance of Posterior Parameters \nAs a simple example, let us calculate the covariance matrix for our posterior samples. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "PSUk",
+ "code_hash": "61f5fc6954cb49a1daed4e2b1b796a49",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "HSSM Model based on LAN likelihood \nWith HSSM you can switch between pre-supplied models with a simple change of argument. The type of likelihood that will be accessed might change in the background for you. \nHere we see an example in which the underlying likelihood is now a LAN . \nWe will talk more about different types of likelihood functions and backends later in the tutorial. For now just keep the following in mind: \nThere are three types of likelihoods \n\nanalytic \napprox_differentiable \nblackbox \n \nTo check which type is used in your HSSM model simple type: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "vGiW",
+ "code_hash": "fc73ae9d3c803329403a4cb1ea7dfaa6",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Ah... we were using an analytical likelihood with the DDM model in the last section.\nNow let's see something different! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "SYQT",
+ "code_hash": "795d16757f2571050e0c89644638ee4e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Simulating Angle Data \nAgain, let us simulate a simple dataset. This time we will use the angle model (passed via the model argument to the simulator() function). \nThis model is distinguished from the basic ddm model by an additional theta parameter which specifies the angle with which the decision boundaries collapse over time. \n \n\nDDMs with collapsing bounds have been of significant interest in the theoretical literature, but applications were rare due to a lack of analytical likelihoods. HSSM facilitates inference with such models via the our approx_differentiable likelihoods. HSSM ships with a few predefined models based on LANs , but really we don't want to overemphasize those. They reflect the research interest of our and adjacent labs to a great extend. \nInstead, we encourage the community to contribute to this model reservoir (more on this later). "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "PSQn",
+ "code_hash": "bc4a1494200eaef3948f9fde51c13dba",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We pass a single additional argument to our HSSM class and set model='angle'. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "lQxp",
+ "code_hash": "95323027096e88c7dfe15a771b8dff0e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "The model graph now show us an additional parameter theta! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "rSYo",
+ "code_hash": "adfc9fd1e9391dd1f569c057e39aaa98",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's check the type of likelihood that is used under the hood ... "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "WfYj",
+ "code_hash": "8399a1fb4ac136510cafcc70aac454cd",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Ok so here we rely on a likelihood of the approx_differentiable kind. \nAs discussed, with the initial set of pre-supplied likelihoods, this implies that we are using a LAN in the background. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "uDnK",
+ "code_hash": "7ef0ec928ed944775357bf40445fce01",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Choosing Priors "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "aWBL",
+ "code_hash": "7caa8fe32e17ae28217a32261b28fbc3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "HSSM allows you to specify priors quite freely. If you used HDDM previously, you may feel relieved to read that your hands are now untied! \n \n\nWith HSSM we have multiple routes to priors. But let's first consider a special case: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "MIsd",
+ "code_hash": "d493325b681724d6e68122d9f6985c48",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Fixing a parameter to a given value "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IrqS",
+ "code_hash": "07765b97b2ba5c3f558b55de21d8b987",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Assume that instead of fitting all parameters of the DDM, \n \n\nwe instead want to fit only the v (drift) parameter, setting all other parameters to fixed scalar values. \n \n\nHSSM makes this extremely easy! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "WJUG",
+ "code_hash": "6ecd157a2e6a96da177fd99856071aee",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Since we fix all but one parameter, we therefore estimate only one parameter. This should be reflected in our model graph, where we expect only one free random variable v: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "PieA",
+ "code_hash": "3fbe72f2577573be05523d2a2dd16c88",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Instead of the trace on the right, a useful alternative / complement is the rank plot .\nAs a rule of thumb, if the rank plots within chains look uniformly distributed , then our chains generally exhibit good mixing . "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Ilkb",
+ "code_hash": "88628ad8ad3b98d8d4409a51b4da3e9b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Named priors \nWe can choose any PyMC Distribution to specify a prior for a given parameter. \nEven better, if natural parameter bounds are provided, HSSM automatically truncates the prior distribution so that it respect these bounds. \nBelow is an example in which we specify a Normal prior on the v parameter of the DDM. \nWe choose a ridiculously low ||(\\sigma||) value, to illustrate it's regularizing effect on the parameter (just so we see a difference and you are convinced that something changed). "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "jXAc",
+ "code_hash": "b49b5e11aeb3876af57b3d0f5bdf934a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Observe how we reused our previous dataset with underlying parameters \n\nv = 0.5 \na = 1.5 \nz = 0.5 \nt = 0.2 \n \nIn contrast to our previous sampler round, in which we used Uniform priors, here the v estimate is shrunk severley towared ||(0||) and the t and z parameter estimates are very biased to make up for this distortion. Also, overall we see a lot of divergences now, which is a sign of poor sampler performance. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "mSxP",
+ "code_hash": "b4ca8899d02714f7d8d29fa14a61ccf2",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "HSSM Model with Regression \n \n\nCrucial to the scope of HSSM is the ability to link parameters with trial-by-trial covariates via (hierarchical, but more on this later) general linear models. \nIn this section we explore how HSSM deals with these models. No big surprise here... it's simple! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IZEX",
+ "code_hash": "0b7778e21bf1d3698fb46199f6e19e96",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Case 1: One parameter is a Regression Target "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "UFLA",
+ "code_hash": "47266aed40fdf3aa85302e735c38170a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Simulating Data "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "RCUM",
+ "code_hash": "9ad4f61ce15a28f597826549d6962d02",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's first simulate some data, where the trial-by-trial parameters of the v parameter in our model are driven by a simple linear regression model. \nThe regression model is driven by two (random) covariates x and y, respectively with coefficients of ||(0.8||) and ||(0.3||) which are also simulated below.\nWe set the intercept to ||(0.3||) . \nThe rest of the parameters are fixed to single values as before. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "NBGo",
+ "code_hash": "3659d5ca90065c7d7ec42d92341ee4cb",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Basic Model "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "eooq",
+ "code_hash": "6e69d3d7e277b29bb98a43b8bc1e58c9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We now create the HSSM model. \nNotice how we set the include argument. The include argument expects a list of dictionaries, one dictionary for each parameter to be specified via a regression model. \nFour keys are expected to be set: \n\nThe name of the parameter, \nPotentially a prior for each of the regression level parameters (||(\\beta||) 's), \nThe regression formula \nA link function. \n \nThe regression formula follows the syntax in the formulae python package (as used by the Bambi package for building Bayesian Hierarchical Regression Models. \nBambi forms the main model-construction backend of HSSM. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "vkvV",
+ "code_hash": "83b7fa3587f31fbdf290d2974493ff3c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Param class \nAs illustrated below, there is an alternative way of specifying the parameter specific data via the Param class. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "wbrJ",
+ "code_hash": "9190e9707002f2ae7189ea07060236cc",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Custom Model "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "viyg",
+ "code_hash": "8167c96f4a686909ad8033b15b94b2aa",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "These were the defaults, with a little extra labor, we can e.g. customize the choice of priors for each parameter in the model. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ctlq",
+ "code_hash": "c63488a7a41dfa92ae050160f1d6119d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Notice how v is now set as a regression. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "iFMf",
+ "code_hash": "350299302ca853e886b443ecacb717fd",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Case 2: One parameter is a Regression (LAN) \nWe can do the same thing with the angle model. \nNote : \nOur dataset was generated from the basic DDM here, so since the DDM assumes stable bounds, we expect the theta (angle of linear collapse) parameter to be recovered as close to ||(0||) . "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "gZOs",
+ "code_hash": "88079d1614e304a6323a7b782c66d0df",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Great! theta is recovered correctly, on top of that, we have reasonable recovery for all other parameters! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "wtAr",
+ "code_hash": "f431576506f6893ad658452a71f97b00",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Case 3: Multiple Parameters are Regression Targets (LAN) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "SQfX",
+ "code_hash": "fdf9405d349459b6aa3c64990abaeefb",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's get a bit more ambitious. We may, for example, want to try a regression on a few of our basic model parameters at once. Below we show an example where we model both the a and the v parameters with a regression. \nNOTE: \nIn our dataset of this section, only v is actually driven by a trial-by-trial regression, so we expect the regression coefficients for a to hover around ||(0||) in our posterior. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "lPrT",
+ "code_hash": "354766c20debfd2fa548b2ed96147973",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We successfully recover our regression betas for a! Moreover, no warning signs concerning our chains. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "KoaX",
+ "code_hash": "eac42d855b6b30e5f9ffa5424448c8c7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Case 4: Categorical covariates "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ngkg",
+ "code_hash": "7fc6884f8e28f7ffd9dac2c41dcf99cb",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Hierarchical Inference "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "BNjl",
+ "code_hash": "bdc72ff2e24293c1fbf1151fce853f46",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's try to fit a hierarchical model now. We will simulate a dataset with ||(15||) participants, with ||(200||) observations / trials for each participant. \nWe define a group mean mean_v and a group standard deviation sd_v for the intercept parameter of the regression on v, which we sample from a corresponding normal distribution for each participant. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zLYB",
+ "code_hash": "10a055021fc2aaa1152269d2aeb7dccd",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Simulate Data "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "jaBo",
+ "code_hash": "43c57a0f927ec4006494bb39ec43f834",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We can now define our HSSM model. \nWe specify the regression as v ~ 1 + (1|participant_id) + x + y. \n(1|participant_id) tells the model to create a participant-wise offset for the intercept parameter. The rest of the regression ||(\\beta||) 's is fit globally. \nAs an R user you may recognize this syntax from the lmer package. \nOur Bambi backend is essentially a Bayesian version of lmer , quite like the BRMS package in R , which operates on top of STAN . \nAs a previous HDDM user, you may recognize that now proper mixed-effect models are viable! \nYou should be able to handle between and within participant effects naturally now! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "YCKg",
+ "code_hash": "ad1e4e0cdadd8e0dcbd92d17e57bfece",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Basic Hierarchical Model "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "dGch",
+ "code_hash": "67ef188dfadcf2beb37252d72f121034",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's look at the posteriors! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "NFPl",
+ "code_hash": "698ec6b1b7f1bf6e993bb75486ab822b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Model Comparison "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "BxxS",
+ "code_hash": "0bca84feb1555b2daee235138c99a32a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Fitting single models is all well and good. We are however, often interested in comparing how well a few different models account for the same data. \nThrough ArviZ , we have out of the box access to modern Bayesian Model Comparison. We will keep it simple here, just to illustrate the basic idea. \nScenario \nThe following scenario is explored. \nFirst we generate data from a ddm model with fixed parameters, specifically we set the a parameter to ||(1.5||) . \nWe then define two HSSM models: \n\nA model which allows fitting all but the a parameter, which is fixed to ||(1.0||) (wrong) \nA model which allows fitting all but the a parameter, which is fixed to ||(1.5||) (correct) \n \nWe then use the ArviZ 's compare() function, to perform model comparison via elpd_loo. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZpGF",
+ "code_hash": "9f823ea68143a857c1b38ca32d0db056",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Data Simulation "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Iijd",
+ "code_hash": "982bfc7c2d3dd2894476cb1aa2ce3bfc",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Defining the Models "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "pgJK",
+ "code_hash": "144873be2fa66b12a90ab340638b974e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Compare "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "cwqn",
+ "code_hash": "3a35cbe10d202b444b4c602b7b282569",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Notice how the posterior weight on the correct model is close to (or equal to ) ||(1||) here.\nIn other words model comparison points us to the correct model with\na very high degree of certainty here! \nWe can also use the .plot_compare() function to illustrate the model comparison visually. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "XZIo",
+ "code_hash": "5b9c7c4373cc4eaa0ca54c805f3f8146",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Using the forest plot we can take a look at what goes wrong for the \"wrong\" model. \nTo make up for the mistplaced setting of the a parameter, the posterior seems to compensate by\nmis-estimating the other parameters. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "VjEH",
+ "code_hash": "6bf71eebc8936a7bf3d43bc8630d1390",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Closer look! \n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zesr",
+ "code_hash": "ecdf2b5638ae4650818508cadbda2ec3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We have seen a few examples of HSSM models at this point. Add a model via a string, maybe toy a bit with with the priors and set regression functions for a given parameter. Turn it hierarchical... Here we begin to peak a bit under the hood. \nAfter all, we want to encourage you to contribute models to the package yourself. \nLet's remind ourself of the model_config dictionaries that define model properties for us. Again let's start with the DDM. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "oXQC",
+ "code_hash": "cef5700c705c5ae14400baf0b5581df4",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "The dictionary has a few high level keys. \n\nresponse \nlist_params \ndescription \nlikelihoods \n \nLet us take a look at the available likelihoods: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "vAZI",
+ "code_hash": "7843d55563c488acb5c2681674b9b2e8",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "For the DDM we have available all three types of likelihoods that HSSM deals with: \n\nanalytical \napprox_differentiable \nblackbox \n \nLet's expand the dictionary contents more: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "NGNk",
+ "code_hash": "bee7091e58e3d091383e6d14cb41cdd2",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We see three properties (key) in this dictionary, of which two are essential: \n\nThe loglik field, which points to the likelihood function \nThe backend field, which can be either None (defaulting to pytensor for analytical likelihoods), jax or pytensor \nThe bounds field, which specifies bounds on a subset of the model parameters \nThe default_priors field, which specifies parameter wise priors \n \nIf you provide bounds for a parameter, but no default_priors, a Uniform prior that respects the specified bounds will be applied. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "CWwK",
+ "code_hash": "fb66f3bd1cb223a85aa981039b9bb8bf",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Next, let's look at the approx_differentiable part.\nThe likelihood in this part is based on a LAN which was available in HDDM through the LAN extension . "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "iCCe",
+ "code_hash": "5c878216096505af663b943305e34fac",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We see that the loglik field is now a string that points to a .onnx file.\nOnnx is a meta framework for Neural Network specification, that allows translation between deep learning Frameworks. This is the preferred format for the neural networks we store in our model reservoir on HuggingFace . \nMoreover notice that we now have a backend field. We allow for two primary backends in the approx_differentiable field. \n\npytensor \njax \n \nThe jax backend assumes that your likelihood is described as a jax function, the pytensor backend assumes that your likelihood is described as a pytensor function. Ok not that surprising... \nWe won't dwell on this here, however the key idea is to provide users with a large degree of flexibility in describing their likelihood functions and moreover to allow targeted optimization towards MCMC sampler types that PyMC allows us to access. \nYou can find a dedicated tutorial in the documentation, which describes the different likelihoods in much more detail. \nInstead, let's take a quick look at how these newfound insights can be used for custom model definition. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "oRYm",
+ "code_hash": "7a6c70059380f1ff0928c36435a8844e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "In this case we actually built the model class with an approx_differentiable LAN likelihood, instead of the default analytical likelihood we used in the beginning of the tutorial. The assumed generative model remains the ddm however! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "vWAQ",
+ "code_hash": "362689ebacbec087b6ff836d76636408",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We can take this further and specify a completely custom likelihood. See the dedicated tutorial for more examples! \nWe will see one specific example below to illustrate another type of likelihood function we have available for model building in HSSM, the Blackbox likelihood. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "opEZ",
+ "code_hash": "5610cb45075a5ea4f06b8d9370f8d87d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "'Blackbox' Likelihoods \n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zhCr",
+ "code_hash": "aae32d442f99236c41ea0cfcbdf3ba3f",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "What is a Blackbox Likelihood Function ? \nA Blackbox Likelihood Function is essentially any Python callable (function) that provides trial by trial likelihoods for your model of interest. What kind of computations are performed in this Python function is completely arbitrary. \nE.g. you could built a function that performs forward simulation from you model, constructs are kernel-density estimate for the resulting likelihood functions and evaluates your datapoints on this ad-hoc generated approximate likelihood. \nWhat I just described is a once state-of-the-art method of performing simulation based inference on Sequential Sampling models, a precursor to LANs if you will. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "CCxq",
+ "code_hash": "e15cceecd9306e0a90b1964de2292638",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We will do something simpler to keep it short and sweet, but really... the possibilities are endless! \n \n \n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "jKDm",
+ "code_hash": "1b79f6d90d77310125422cae43d6354e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Simulating simple dataset from the DDM "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "OcMK",
+ "code_hash": "0720db7ae37f5fbfeb0fad274a740ce3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "As always, let's begin by generating some simple dataset. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZzbA",
+ "code_hash": "2ea7c99d5fe5ff542087e6c075c5e47c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Define the likelihood "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ETTw",
+ "code_hash": "23177d963cd1474416f1e7108a301d9c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Now the fun part... we simply define a Python function my_blackbox_loglik which takes in our data as well as a bunch of model parameters (in our case the familiar v,a, z, t from the DDM). \nThe function then does some arbitrary computation inside (in our case e.g. we pass the data and parameters to the DDM log-likelihood from our predecessor package HDDM). \nThe important part is that inside my_blackbox_loglik anything can happen. We happen to call a little custom function that defines the likelihood of a DDM. \nFun fact: \nIt is de-facto the likelihood which is called by HDDM . "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IiyY",
+ "code_hash": "66ae2a9c51775b772d6b3e3e0c1bd432",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Define HSSM class with our Blackbox Likelihood "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "rKHS",
+ "code_hash": "4ccde8cc786cf75ec2c14daa706c8c5b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We can now define our HSSM model class as usual, however passing our my_blackbox_loglik() function to the loglik argument, and passing as loglik_kind = blackbox. \nThe rest of the model config is as usual. Here we can reuse our ddm model config, and simply specify bounds on the parameters (e.g. your Blackbox Likelihood might be trustworthy only on a restricted parameters space). "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "MwfG",
+ "code_hash": "58151af82b7b9378ab3bc9bfbfc178c9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "NOTE : \nSince Blackbox likelihood functions are assumed to not be differentiable, our default sampler for such likelihood functions is a Slice sampler. HSSM allows you to choose any other suitable sampler from the PyMC package instead. A bunch of options are available for gradient-free samplers. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "TxKU",
+ "code_hash": "4a7640cb2435f7a5e042691b8a5ab97c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Results "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ieTG",
+ "code_hash": "bc86abcb5090ec15c6877e9f282fcfd0",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "HSSM Random Variables in PyMC \nWe covered a lot of ground in this tutorial so far. You are now a sophisticated HSSM user. \nIt is therefore time to reveal a secret. We can actuallly peel back one more layer...\n \n \n \nInstead of letting HSSM help you build the entire model, we can instead use HSSM to construct valid PyMC distributions and then proceed to build a custom PyMC model by ourselves...\n \n \n \nWe will illustrate the simplest example below. It sets a pattern that can be exploited for much more complicated modeling exercises, which importantly go far beyond what our basic HSSM class may facilitate for you! \nSee the dedicated tutorial in the documentation if you are interested. \nLet's start by importing a few convenience functions: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "nnaP",
+ "code_hash": "736f77a65cca13c354132f3b4d486756",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Simulate some data "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "rEHw",
+ "code_hash": "38864260ac1c5ec88d8bd657aeeea58b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Build a custom PyMC Model "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "nRku",
+ "code_hash": "912f3e00dd142f1a8421d8eb45e27145",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "We can now use our custom random variable DDM directly in a PyMC model. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZpYI",
+ "code_hash": "5683733bc64dd45f9024e21b35b80da3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Let's check the model graph: "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "cFzV",
+ "code_hash": "e1bfeaa3bc3f6bc1938372d84ceb4d9c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Looks remarkably close to our HSSM version! \nWe can use PyMC directly to sample and finally return to ArviZ for some plotting! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "HqwE",
+ "code_hash": "7c687f17fd04eb170f1c2aed8235ef99",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Alternative Models with PyMC \nWith very little extra work, we can in fact load any of the models accessible via HSSM. Here is an example, where we load the angle model instead. \nWe first construction the likelihood function, using make_likelihood_callable(). \nThen we produce a valid pymc.distribution using the\nmake_distribution() utility function. \nJust like the DDM class above, we can then use this distribution inside a PyMC model. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "jgCY",
+ "code_hash": "5ff2c060baa7b9d5b94c48f520f9fa56",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Note that we need to supply the params_is_reg argument (\"reg\" for \"regression\").\nThis is a boolean vector, which specifies for each input to the likelihood function, whether or not it is defined to be \"trial-wise\", as is expected if the parameter\nis the output e.g. of a regression function. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "YSLY",
+ "code_hash": "0e3c51915d1d57dc2379a386fe184151",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "Regression via PyMC \nFinally to illustrate the usage of PyMC a little more elaborately, let us build a PyMC model with regression components. "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "QVpv",
+ "code_hash": "eb10fcd51d58229a66495ad7867a359a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "All layers peeled back, the only limit in your modeling endeavors becomes the limit of the PyMC universe! \n \n \n\n Enjoy the exploration! "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "hXpz",
+ "code_hash": "ca218914ead314da414928fce753fe2e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/markdown": "End "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Hbol",
+ "code_hash": "8aa048bef04d5e215cc8514cee844ca7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "
PosixPath('/home/jovan/Documents/projects/hssm_wksp/HSSM/docs/tutorials') "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "lEQa",
+ "code_hash": "c29e154ba88da175215ab5d4c799118c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Xref",
+ "code_hash": "a50dcb87a223aac4a2c6e754f7c3b585",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Kclp",
+ "code_hash": "379681790c04615eca36146b12d9db64",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Hstk",
+ "code_hash": "585b745a3330a8917829b9ea68afe6d7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "iLit",
+ "code_hash": "af11df090be7dd1b12cafa3629beb85f",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/json": "[\"ddm\", \"ddm_sdv\", \"full_ddm\", \"angle\", \"levy\", \"ornstein\", \"weibull\", \"race_no_bias_angle_4\", \"ddm_seq2_no_bias\", \"lba3\", \"lba2\"]"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ROlb",
+ "code_hash": "5ba22750d6834e4f055f784e19ec0ac1",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/json": "{\"response\": [\"rt\", \"response\"], \"list_params\": [\"v\", \"a\", \"z\", \"t\"], \"choices\": [-1, 1], \"description\": \"The Drift Diffusion Model (DDM)\", \"likelihoods\": {\"analytical\": {\"loglik\": \"text/html:function logp_ddm
Compute analytical likelihood for the DDM model with `sv`.
def logp_ddm(data: numpy.ndarray, v: float, a: float, z: float, t: float, err: float = 1e-15, k_terms: int = 20, epsilon: float = 1e-15) -> numpy.ndarray:
\", \"backend\": null, \"bounds\": {\"v\": [\"text/plain+float:-inf\", \"text/plain+float:inf\"], \"a\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"]}, \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"extra_fields\": null}, \"approx_differentiable\": {\"loglik\": \"ddm.onnx\", \"backend\": \"jax\", \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"bounds\": {\"v\": [\"text/plain+float:-3.0\", \"text/plain+float:3.0\"], \"a\": [\"text/plain+float:0.3\", \"text/plain+float:2.5\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:2.0\"]}, \"extra_fields\": null}, \"blackbox\": {\"loglik\": \"text/html:function outer
def outer(data: numpy.ndarray, *args, **kwargs):
\", \"backend\": null, \"bounds\": {\"v\": [\"text/plain+float:-inf\", \"text/plain+float:inf\"], \"a\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"]}, \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"extra_fields\": null}}}"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Vxnm",
+ "code_hash": "b849e1f67d64898458f6645d5cb141bc",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "aLJB",
+ "code_hash": "19e7daf3f594fd27aca34d1ea3e90200",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "AjVT",
+ "code_hash": "7481d7f032267a83544366f4834d9125",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "pHFh",
+ "code_hash": "96bf3f613b57aaad95a638c21d0c9fef",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: ddm\n\nResponse variable: rt,response\nLikelihood: analytical\nObservations: 500\n\nParameters:\n\nv:\n Prior: Normal(mu: 0.0, sigma: 2.0)\n Explicit bounds: (-inf, inf)\n\na:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\nz:\n Prior: Uniform(lower: 0.0, upper: 1.0)\n Explicit bounds: (0.0, 1.0)\n\nt:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "TRpd",
+ "code_hash": "90c8b356c5ba0b881f7de499a330be3f",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "yCnT",
+ "code_hash": "361fb77c0069452430ce387c638a2015",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [a, t, z, v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1500 0 0.463 3 94.96 draws/s 0:00:15 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1500 0 0.782 7 48.60 draws/s 0:00:30 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 1_000 tune and 500 draw iterations (2_000 + 1_000 draws total) took 31 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/1000 [00:00, ?it/s]\r 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 139/1000 [00:00<00:00, 1385.32it/s]\r 29%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 290/1000 [00:00<00:00, 1454.97it/s]\r 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 436/1000 [00:00<00:00, 1399.86it/s]\r 59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 586/1000 [00:00<00:00, 1432.63it/s]\r 
74%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 742/1000 [00:00<00:00, 1476.77it/s]\r 90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 897/1000 [00:00<00:00, 
1498.68it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1000/1000 [00:00<00:00, 1483.04it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "kqZH",
+ "code_hash": "62088133f4aa7b6159ca8cb3a091dbda",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "<class 'arviz.data.inference_data.InferenceData'> "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "dGlV",
+ "code_hash": "d02de374d9d74b8b812da23fc49f7d8d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n \n \n
\n \n \n \n posterior \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 36kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 7 ... 493 494 495 496 497 498 499\nData variables:\n t (chain, draw) float64 8kB 0.4858 0.4874 0.5649 ... 0.5131 0.5023\n a (chain, draw) float64 8kB 1.42 1.396 1.432 ... 1.459 1.465 1.531\n v (chain, draw) float64 8kB 0.46 0.4994 0.4201 ... 0.4756 0.4639\n z (chain, draw) float64 8kB 0.4921 0.4953 0.5207 ... 0.5448 0.5019\nAttributes:\n created_at: 2026-01-08T04:55:33.399374+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 30.849383115768433\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (4)
t
(chain, draw)
float64
0.4858 0.4874 ... 0.5131 0.5023
array([[0.48575898, 0.4874169 , 0.56489413, 0.52327668, 0.52327668,\n 0.51735751, 0.53034364, 0.47649765, 0.46284268, 0.46520331,\n 0.46083669, 0.4593662 , 0.48406028, 0.4526651 , 0.46001507,\n 0.48118006, 0.5294147 , 0.53076833, 0.4787791 , 0.47996894,\n 0.44641416, 0.48250367, 0.48269964, 0.49808578, 0.57851296,\n 0.55575702, 0.49335675, 0.53381615, 0.52763723, 0.51874272,\n 0.48449161, 0.54815292, 0.45062441, 0.521377 , 0.50926011,\n 0.52282439, 0.53104155, 0.50582164, 0.46798903, 0.47273825,\n 0.51961557, 0.55223779, 0.52318457, 0.54208213, 0.51812459,\n 0.55992107, 0.48963462, 0.54657144, 0.51707202, 0.48923537,\n 0.53118936, 0.50839344, 0.50624422, 0.51701415, 0.50237139,\n 0.52909628, 0.50828604, 0.52146805, 0.53679188, 0.4696402 ,\n 0.54550002, 0.49338531, 0.48342327, 0.55110842, 0.50910738,\n 0.51598703, 0.49536839, 0.46054526, 0.54135241, 0.53620886,\n 0.50751537, 0.49578939, 0.51854171, 0.53459467, 0.53698073,\n 0.52076779, 0.53930056, 0.55693755, 0.56330514, 0.54042356,\n 0.49135896, 0.46826939, 0.47642666, 0.52091203, 0.51972218,\n 0.49114814, 0.50305294, 0.49013528, 0.48449883, 0.48801788,\n 0.44691148, 0.55053182, 0.54364458, 0.50496596, 0.53677642,\n 0.53519451, 0.53467746, 0.49012529, 0.50976122, 0.51972499,\n...\n 0.53135932, 0.49476602, 0.49237559, 0.49237559, 0.52153435,\n 0.51780916, 0.45114472, 0.43669586, 0.5219717 , 0.49255034,\n 0.54366199, 0.50351056, 0.55812408, 0.49702379, 0.49229607,\n 0.54182283, 0.56007076, 0.51446918, 0.51673506, 0.5027071 ,\n 0.51314394, 0.51399041, 0.4969603 , 0.54656141, 0.50973111,\n 0.46628013, 0.50426948, 0.51560854, 0.49978386, 0.45752611,\n 0.47419317, 0.47419317, 0.49069551, 0.51973564, 0.49829212,\n 0.49473534, 0.50672342, 0.49824202, 0.48064452, 0.50357108,\n 0.49601765, 0.5384329 , 0.51425332, 0.4635438 , 0.5117676 ,\n 0.50766191, 0.52603337, 0.52351951, 0.5366462 , 0.49393139,\n 0.54399711, 0.50216277, 0.48067998, 0.51274 , 0.44683778,\n 0.4511459 , 0.51588204, 0.49525355, 0.52425554, 0.47243496,\n 
0.47243496, 0.47541677, 0.48109169, 0.49133874, 0.47860567,\n 0.5068819 , 0.47844727, 0.52531496, 0.50954969, 0.5266407 ,\n 0.48567178, 0.52013718, 0.49702878, 0.5152404 , 0.50095719,\n 0.4890485 , 0.50336455, 0.50587956, 0.503944 , 0.47849903,\n 0.53080546, 0.53078259, 0.50781999, 0.50148963, 0.51480523,\n 0.50365531, 0.49776146, 0.52533235, 0.53007223, 0.4927408 ,\n 0.46641809, 0.46647075, 0.48059084, 0.46988261, 0.53398489,\n 0.48715129, 0.50732104, 0.47348156, 0.51305504, 0.50231702]]) a
(chain, draw)
float64
1.42 1.396 1.432 ... 1.465 1.531
array([[1.41962593, 1.39567367, 1.4317506 , 1.43815873, 1.43815873,\n 1.41506752, 1.42489165, 1.47228951, 1.54019871, 1.54261446,\n 1.50883799, 1.48778168, 1.48907592, 1.43981795, 1.47780259,\n 1.47182224, 1.49534181, 1.37663927, 1.49219117, 1.45348848,\n 1.53877615, 1.48388185, 1.50187359, 1.42217578, 1.38681059,\n 1.37511451, 1.47061828, 1.4336922 , 1.42147773, 1.41324983,\n 1.49542278, 1.38724289, 1.52539289, 1.49257124, 1.49170351,\n 1.45692004, 1.4366145 , 1.48474581, 1.52332005, 1.54322473,\n 1.4600666 , 1.41252005, 1.44291269, 1.45527941, 1.42233897,\n 1.43794569, 1.47904204, 1.43858699, 1.44538457, 1.38315857,\n 1.47805613, 1.43128946, 1.47748292, 1.47310777, 1.43448457,\n 1.45652247, 1.48214165, 1.41785655, 1.4974154 , 1.42919286,\n 1.44088427, 1.45988243, 1.47393757, 1.4156567 , 1.5180166 ,\n 1.40384394, 1.42316929, 1.45782434, 1.50473414, 1.44310825,\n 1.4585378 , 1.46519248, 1.44046144, 1.4418534 , 1.44324006,\n 1.44996868, 1.4305769 , 1.36119743, 1.42224077, 1.44935953,\n 1.49600976, 1.4710327 , 1.47940687, 1.41755625, 1.45677903,\n 1.41673891, 1.47461421, 1.45417031, 1.53012483, 1.51558344,\n 1.52305116, 1.43019528, 1.39317838, 1.49107451, 1.3956755 ,\n 1.42923222, 1.44233663, 1.43354637, 1.45811866, 1.41322255,\n...\n 1.47569475, 1.49894881, 1.454072 , 1.454072 , 1.43673935,\n 1.42883534, 1.52023953, 1.4998433 , 1.49035233, 1.43324679,\n 1.4293716 , 1.4480808 , 1.40817894, 1.38344763, 1.48465827,\n 1.42550672, 1.41047898, 1.4757535 , 1.4400986 , 1.45069438,\n 1.47423521, 1.45967282, 1.47093848, 1.42788083, 1.47629967,\n 1.48285612, 1.38494346, 1.46100182, 1.51436122, 1.46489208,\n 1.50264806, 1.50264806, 1.4876187 , 1.4092414 , 1.44732079,\n 1.42765811, 1.46670338, 1.48656924, 1.4691816 , 1.46190357,\n 1.46920654, 1.41735254, 1.45906975, 1.46405789, 1.46577055,\n 1.39656671, 1.45102417, 1.38562744, 1.40765999, 1.48777242,\n 1.3922839 , 1.44482542, 1.51418906, 1.4185966 , 1.51692311,\n 1.52406142, 1.45007398, 1.46300545, 1.42843102, 1.46536687,\n 
1.46536687, 1.46885152, 1.46241395, 1.5309264 , 1.51293759,\n 1.48528343, 1.46810474, 1.46595447, 1.45276806, 1.46347675,\n 1.4390223 , 1.41264463, 1.48994292, 1.40924903, 1.46437362,\n 1.48909828, 1.4463778 , 1.44434882, 1.46632659, 1.46442781,\n 1.41721859, 1.43887618, 1.45153336, 1.45764906, 1.47446183,\n 1.45247501, 1.4773564 , 1.41632352, 1.48490392, 1.44130061,\n 1.54995306, 1.46375494, 1.53193968, 1.4798981 , 1.46266293,\n 1.43188296, 1.46957178, 1.45873085, 1.4645994 , 1.53145956]]) v
(chain, draw)
float64
0.46 0.4994 ... 0.4756 0.4639
array([[0.46002474, 0.49935788, 0.42010292, 0.47832265, 0.47832265,\n 0.49531407, 0.42805387, 0.47274173, 0.49211158, 0.46531771,\n 0.52405831, 0.52751463, 0.48996999, 0.48740123, 0.45327257,\n 0.44433403, 0.3999858 , 0.4400614 , 0.49898925, 0.46203263,\n 0.51281462, 0.54555939, 0.58259382, 0.40303473, 0.44202404,\n 0.44628367, 0.47327411, 0.44126281, 0.42563734, 0.44816899,\n 0.51424922, 0.40406922, 0.50454619, 0.54675165, 0.56177533,\n 0.42184901, 0.44984197, 0.45478304, 0.48355605, 0.47857872,\n 0.42284614, 0.42280862, 0.39300878, 0.44165036, 0.40855478,\n 0.46295939, 0.38347846, 0.43803047, 0.41221236, 0.40121386,\n 0.48724416, 0.41713424, 0.4894201 , 0.45898768, 0.45573498,\n 0.49215874, 0.45280984, 0.4394082 , 0.47264357, 0.41767965,\n 0.42896376, 0.4754212 , 0.49607791, 0.42347567, 0.51022987,\n 0.34891115, 0.49754998, 0.46131279, 0.45161885, 0.4376886 ,\n 0.46499653, 0.47842048, 0.54809135, 0.51327392, 0.44520869,\n 0.44395821, 0.4539666 , 0.44784109, 0.41385033, 0.44637958,\n 0.49903689, 0.4313524 , 0.46596975, 0.42145104, 0.42400477,\n 0.45833683, 0.44103262, 0.47911584, 0.5030067 , 0.54724429,\n 0.47889834, 0.50877738, 0.46336246, 0.3988057 , 0.47521016,\n 0.53218054, 0.45433204, 0.46957592, 0.44154727, 0.42166304,\n...\n 0.46086418, 0.45642178, 0.43570236, 0.43570236, 0.4829974 ,\n 0.4666672 , 0.52365082, 0.54384614, 0.433968 , 0.39185751,\n 0.49576688, 0.44625795, 0.47557563, 0.43994571, 0.54460588,\n 0.46881728, 0.44246676, 0.43203313, 0.47421233, 0.45738979,\n 0.44339128, 0.49097843, 0.50331195, 0.44334794, 0.44424603,\n 0.44985518, 0.47824584, 0.48500045, 0.45922198, 0.44563862,\n 0.48540394, 0.48540394, 0.47877128, 0.41778203, 0.5043077 ,\n 0.44035812, 0.47148907, 0.51071731, 0.49095185, 0.52204284,\n 0.51396949, 0.47694901, 0.44565396, 0.43047348, 0.40925179,\n 0.54186569, 0.43236364, 0.46116241, 0.42952852, 0.40603855,\n 0.41185788, 0.52819373, 0.46235274, 0.52754265, 0.47926358,\n 0.45898006, 0.48723861, 0.4859017 , 0.46183195, 0.53758073,\n 
0.53758073, 0.52305545, 0.49480701, 0.48161108, 0.46251392,\n 0.48742003, 0.46839586, 0.47882352, 0.50716931, 0.51746354,\n 0.452746 , 0.4437606 , 0.51680848, 0.46444835, 0.40887173,\n 0.42780076, 0.469227 , 0.43542738, 0.46011848, 0.47629926,\n 0.4202373 , 0.36867242, 0.52693297, 0.47675539, 0.56824748,\n 0.54620153, 0.46019097, 0.48401945, 0.47754063, 0.45489857,\n 0.56569335, 0.43165606, 0.57073731, 0.51990311, 0.46522088,\n 0.53405837, 0.49894122, 0.54383762, 0.47557582, 0.46393061]]) z
(chain, draw)
float64
0.4921 0.4953 ... 0.5448 0.5019
array([[0.49214773, 0.49526222, 0.52065169, 0.50104238, 0.50104238,\n 0.50071145, 0.50939918, 0.50037135, 0.49347186, 0.49116017,\n 0.46882245, 0.49284441, 0.47529894, 0.47943944, 0.46911097,\n 0.5110609 , 0.54573222, 0.50257665, 0.48516217, 0.50222268,\n 0.47693099, 0.4725742 , 0.48062739, 0.51638469, 0.51507879,\n 0.52305171, 0.49708241, 0.50711891, 0.50880663, 0.49950493,\n 0.5061342 , 0.53070657, 0.47466182, 0.51307581, 0.51866077,\n 0.50979223, 0.52732014, 0.51605704, 0.5082604 , 0.49617651,\n 0.51909792, 0.53961015, 0.52655521, 0.5407605 , 0.50878472,\n 0.53094502, 0.51847335, 0.53677114, 0.5182387 , 0.51909091,\n 0.53485403, 0.49622814, 0.51531898, 0.51205144, 0.51825837,\n 0.51595277, 0.52232993, 0.50775073, 0.52043147, 0.50780626,\n 0.52627376, 0.49105109, 0.50583662, 0.51454489, 0.51166629,\n 0.54910688, 0.5072023 , 0.50350706, 0.52234158, 0.51050429,\n 0.52470601, 0.46930605, 0.50928572, 0.49088719, 0.52006544,\n 0.52272162, 0.51856312, 0.52149496, 0.5162203 , 0.52257794,\n 0.51128888, 0.50527449, 0.49711225, 0.5059195 , 0.52861927,\n 0.50761143, 0.5138448 , 0.50312694, 0.49573275, 0.48988484,\n 0.48831786, 0.51759673, 0.52700703, 0.53236432, 0.49631835,\n 0.51226661, 0.51636912, 0.49635375, 0.4924294 , 0.49782483,\n...\n 0.52986081, 0.51755342, 0.50477203, 0.50477203, 0.51480378,\n 0.49696597, 0.49560143, 0.47340147, 0.52754865, 0.51601404,\n 0.52566238, 0.50630206, 0.50571067, 0.51737786, 0.49776678,\n 0.50774332, 0.5138328 , 0.5173622 , 0.50385519, 0.52668574,\n 0.51967973, 0.49394986, 0.48228597, 0.52173284, 0.5007935 ,\n 0.51963708, 0.50798951, 0.48752222, 0.50583894, 0.49544127,\n 0.48805774, 0.48805774, 0.51450044, 0.49002662, 0.50186736,\n 0.50550457, 0.49502883, 0.51691578, 0.48990291, 0.49806719,\n 0.48693259, 0.50484327, 0.50447346, 0.50352883, 0.50816364,\n 0.48608033, 0.50795832, 0.50649649, 0.48120236, 0.5095809 ,\n 0.50024496, 0.51276875, 0.49020348, 0.48025969, 0.50855525,\n 0.5057506 , 0.50356763, 0.51698105, 0.51985403, 0.51618664,\n 
0.51618664, 0.52155587, 0.49933288, 0.52577785, 0.50430775,\n 0.51066993, 0.49952365, 0.51502769, 0.52448468, 0.491171 ,\n 0.49573328, 0.51049909, 0.49443511, 0.49829613, 0.50818895,\n 0.52376491, 0.49390018, 0.51250314, 0.52820319, 0.49180201,\n 0.5296624 , 0.50902894, 0.50374124, 0.48193598, 0.48672799,\n 0.49286581, 0.51510466, 0.52152346, 0.5045378 , 0.51264841,\n 0.48071807, 0.49692682, 0.48801087, 0.48245312, 0.50786497,\n 0.48692662, 0.48323057, 0.48457479, 0.54476633, 0.5018969 ]]) Attributes: (8)
created_at : 2026-01-08T04:55:33.399374+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 30.849383115768433 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n log_likelihood \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 4MB\nDimensions: (chain: 2, draw: 500, __obs__: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 ... 493 494 495 496 497 498 499\n * __obs__ (__obs__) int64 4kB 0 1 2 3 4 5 6 ... 494 495 496 497 498 499\nData variables:\n rt,response (chain, draw, __obs__) float64 4MB -4.726 -1.446 ... -3.6 -1.44\nAttributes:\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 \n \n
\n \n \n \n \n sample_stats \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 134kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 ... 495 496 497 498 499\nData variables: (12/18)\n divergences (chain, draw) int64 8kB 0 0 0 0 0 0 0 ... 0 0 0 0 0 0\n step_size_bar (chain, draw) float64 8kB 0.6795 0.6795 ... 0.6139\n smallest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n acceptance_rate (chain, draw) float64 8kB 0.958 0.7645 ... 0.9557\n perf_counter_diff (chain, draw) float64 8kB 0.009761 ... 0.01051\n n_steps (chain, draw) float64 8kB 7.0 7.0 7.0 ... 3.0 7.0 7.0\n ... ...\n diverging (chain, draw) bool 1kB False False ... False False\n energy_error (chain, draw) float64 8kB 0.1802 0.7816 ... -0.4273\n largest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n tree_depth (chain, draw) int64 8kB 3 3 3 3 2 2 3 ... 3 3 2 2 3 3\n max_energy_error (chain, draw) float64 8kB -0.2293 0.7816 ... -1.314\n energy (chain, draw) float64 8kB 1.029e+03 ... 1.032e+03\nAttributes:\n created_at: 2026-01-08T04:55:33.411973+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 30.849383115768433\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (18)
divergences
(chain, draw)
int64
0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0
array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n...\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) step_size_bar
(chain, draw)
float64
0.6795 0.6795 ... 0.6139 0.6139
array([[0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n 0.67951135, 0.67951135, 0.67951135, 0.67951135, 0.67951135,\n...\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 
0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042,\n 0.61387042, 0.61387042, 0.61387042, 0.61387042, 0.61387042]]) smallest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) acceptance_rate
(chain, draw)
float64
0.958 0.7645 ... 0.604 0.9557
array([[0.95801053, 0.76447877, 0.94773898, 1. , 0.48756081,\n 0.75745382, 0.96351512, 1. , 0.90093633, 0.85381052,\n 0.90225665, 0.99426105, 0.91716105, 0.61598902, 0.79106332,\n 0.90378824, 0.93295385, 0.89978384, 0.95118566, 0.8403931 ,\n 0.89071995, 0.95273242, 0.8684241 , 0.88606834, 0.83128497,\n 1. , 0.70688015, 0.75208537, 1. , 0.72867336,\n 0.86637098, 0.99765838, 0.52780867, 0.83243235, 0.65038603,\n 1. , 1. , 0.86296985, 0.94782788, 0.79027438,\n 0.74503042, 0.87793851, 0.87527755, 0.94419644, 0.42987378,\n 0.90551433, 1. , 0.94453992, 0.77434937, 0.32680798,\n 0.91731101, 0.875466 , 1. , 0.97506111, 0.81413564,\n 0.93353809, 0.97316971, 0.98440162, 0.46862563, 0.93587668,\n 0.86511503, 0.77808315, 0.85206662, 0.96314516, 0.95256639,\n 0.73970891, 0.87455697, 0.96064317, 0.90217413, 0.98953041,\n 1. , 0.66350226, 0.9529042 , 1. , 0.78777483,\n 0.99922998, 0.63338759, 0.60526823, 0.60432992, 0.95318632,\n 0.96958792, 0.86586073, 1. , 0.93910058, 0.99583013,\n 0.59474342, 0.60788517, 0.8847175 , 0.9333339 , 0.99478106,\n 0.78230891, 0.83488463, 0.9616094 , 0.57179082, 0.92565743,\n 0.50047732, 1. , 0.67061027, 0.8231021 , 0.96264938,\n...\n 0.95936481, 0.9665499 , 0.9119308 , 0.62269982, 0.93451467,\n 0.83449185, 0.88479533, 0.62899238, 0.98050446, 0.87845567,\n 0.94391775, 0.44795988, 0.82206181, 0.16916384, 0.92871901,\n 0.96679926, 0.82655608, 0.97867647, 0.95041843, 0.74685528,\n 0.969639 , 0.87561345, 0.95610803, 0.97949708, 0.79164499,\n 0.9472773 , 0.85410822, 0.90505454, 0.98991344, 0.61280546,\n 0.92470762, 0.5081489 , 0.998332 , 0.838233 , 1. ,\n 0.79547114, 0.9848272 , 0.71806039, 0.90956659, 0.85989665,\n 0.99051319, 0.92162512, 0.88697705, 0.59326667, 0.69423233,\n 0.91854363, 0.97837864, 0.95905873, 0.5074625 , 1. ,\n 0.90767096, 0.80207131, 1. , 0.89192014, 0.73470543,\n 0.86544846, 0.90625653, 0.75969799, 0.99787149, 0.36066898,\n 0.56127542, 0.80987858, 1. , 0.7035562 , 1. 
,\n 0.81091694, 0.87596091, 0.88003247, 0.61531265, 0.67348958,\n 0.95977548, 0.70458961, 0.99843309, 0.95146909, 0.87552423,\n 0.90738566, 0.97476421, 1. , 0.79774575, 0.80259394,\n 0.97896301, 0.45831937, 1. , 0.77526381, 0.75282361,\n 1. , 0.89835929, 0.81526076, 0.44368038, 1. ,\n 0.94090913, 1. , 0.93311283, 1. , 0.76055755,\n 0.9713109 , 1. , 1. , 0.60402871, 0.95573136]]) perf_counter_diff
(chain, draw)
float64
0.009761 0.009409 ... 0.01051
array([[0.00976067, 0.00940912, 0.01000556, 0.00928937, 0.00551971,\n 0.00489707, 0.01065942, 0.00947457, 0.00529812, 0.00505827,\n 0.00979168, 0.00533021, 0.00487485, 0.00468093, 0.00566936,\n 0.00978992, 0.01085803, 0.01004486, 0.00966734, 0.01143464,\n 0.00941999, 0.00982527, 0.0045445 , 0.01029467, 0.00452437,\n 0.00501912, 0.00939779, 0.00972981, 0.00472373, 0.00457586,\n 0.00966738, 0.00894511, 0.0104934 , 0.00969372, 0.00260143,\n 0.00478094, 0.01005243, 0.0096262 , 0.01483763, 0.00642341,\n 0.01256693, 0.00806862, 0.01231321, 0.00716596, 0.00494236,\n 0.00695015, 0.00920595, 0.01008073, 0.004613 , 0.00995854,\n 0.00967556, 0.00968941, 0.00444076, 0.00541492, 0.00475528,\n 0.00925999, 0.00956138, 0.00978691, 0.01001186, 0.00522323,\n 0.00925214, 0.010284 , 0.0088978 , 0.01007436, 0.00949051,\n 0.01953216, 0.00928812, 0.0096677 , 0.01952797, 0.00445686,\n 0.00973391, 0.0093756 , 0.0095933 , 0.00440837, 0.00984296,\n 0.00949782, 0.00498498, 0.00471817, 0.00448649, 0.00525677,\n 0.00912562, 0.00477179, 0.00943915, 0.01074735, 0.00959515,\n 0.0146046 , 0.00747149, 0.01073469, 0.00809464, 0.0111755 ,\n 0.00919888, 0.01453859, 0.0095891 , 0.01018673, 0.00991936,\n 0.00469614, 0.00979543, 0.00911037, 0.00991346, 0.00924965,\n...\n 0.00964745, 0.01001705, 0.0102834 , 0.00919346, 0.01032067,\n 0.00454787, 0.01005214, 0.00461196, 0.01000483, 0.00973918,\n 0.01034643, 0.00468328, 0.00520704, 0.00974769, 0.01038606,\n 0.00938519, 0.01037312, 0.01033014, 0.00965899, 0.01046881,\n 0.00494324, 0.01133945, 0.00990326, 0.01050373, 0.01003847,\n 0.00483045, 0.00928249, 0.00508887, 0.00913697, 0.00544667,\n 0.00924242, 0.0027728 , 0.01014795, 0.01010897, 0.0100006 ,\n 0.01052253, 0.01004186, 0.00522488, 0.00482309, 0.01028124,\n 0.00939028, 0.01065592, 0.01014137, 0.00938648, 0.0102589 ,\n 0.00992994, 0.0099894 , 0.00990063, 0.00492518, 0.01008096,\n 0.00965396, 0.00505706, 0.00565597, 0.00956948, 0.01046014,\n 0.01003214, 0.00932581, 0.01088852, 0.00940114, 0.00995334,\n 
0.0045381 , 0.00543328, 0.00481287, 0.00442114, 0.00501779,\n 0.00968485, 0.01064446, 0.00944608, 0.00600717, 0.00502451,\n 0.00456937, 0.01097153, 0.01112361, 0.00956175, 0.01081218,\n 0.00474565, 0.01056818, 0.00910829, 0.01041984, 0.0099011 ,\n 0.00950634, 0.00526448, 0.00993105, 0.01141821, 0.00491677,\n 0.00543888, 0.01633788, 0.01510646, 0.01096829, 0.00466899,\n 0.01115474, 0.01079556, 0.00970111, 0.01056306, 0.01043336,\n 0.00983016, 0.00513409, 0.00514381, 0.00962431, 0.01051176]]) n_steps
(chain, draw)
float64
7.0 7.0 7.0 7.0 ... 3.0 3.0 7.0 7.0
array([[ 7., 7., 7., 7., 3., 3., 7., 7., 3., 3., 7., 3., 3.,\n 3., 3., 7., 7., 7., 7., 7., 7., 7., 3., 7., 3., 3.,\n 7., 7., 3., 3., 7., 7., 7., 7., 1., 3., 7., 7., 7.,\n 3., 7., 3., 7., 3., 3., 3., 7., 7., 3., 7., 7., 7.,\n 3., 3., 3., 7., 7., 7., 7., 3., 7., 7., 7., 7., 7.,\n 15., 7., 7., 15., 3., 7., 7., 7., 3., 7., 7., 3., 3.,\n 3., 3., 7., 3., 7., 7., 7., 7., 3., 7., 3., 7., 7.,\n 11., 7., 7., 7., 3., 7., 7., 7., 7., 7., 7., 7., 7.,\n 3., 7., 7., 3., 7., 7., 15., 7., 7., 15., 7., 7., 7.,\n 3., 3., 7., 7., 7., 3., 7., 1., 7., 7., 7., 3., 7.,\n 7., 7., 3., 7., 3., 7., 3., 7., 7., 3., 7., 7., 3.,\n 7., 7., 3., 7., 7., 7., 7., 7., 7., 7., 7., 7., 3.,\n 7., 3., 3., 7., 3., 3., 3., 7., 7., 7., 7., 3., 7.,\n 3., 7., 3., 7., 7., 3., 3., 3., 7., 3., 7., 7., 7.,\n 7., 7., 3., 7., 3., 3., 7., 7., 3., 3., 7., 7., 7.,\n 7., 7., 3., 3., 7., 7., 3., 5., 7., 7., 7., 3., 3.,\n 3., 11., 3., 7., 1., 7., 7., 3., 3., 7., 7., 7., 7.,\n 7., 7., 1., 7., 3., 7., 3., 7., 5., 3., 7., 3., 7.,\n 7., 7., 7., 7., 7., 7., 7., 3., 3., 7., 3., 7., 7.,\n 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7.,\n...\n 3., 3., 3., 3., 3., 7., 7., 3., 7., 3., 7., 7., 7.,\n 7., 7., 3., 3., 7., 3., 7., 7., 7., 7., 7., 7., 7.,\n 7., 7., 7., 3., 7., 7., 7., 3., 7., 7., 7., 7., 3.,\n 7., 7., 7., 3., 7., 7., 7., 7., 3., 7., 3., 3., 3.,\n 3., 7., 7., 3., 7., 7., 7., 7., 7., 3., 7., 7., 7.,\n 3., 3., 15., 3., 7., 3., 7., 7., 7., 7., 3., 7., 7.,\n 7., 7., 7., 3., 7., 3., 3., 3., 3., 3., 3., 15., 3.,\n 7., 3., 7., 7., 15., 3., 7., 7., 15., 7., 7., 3., 7.,\n 7., 7., 15., 7., 7., 3., 7., 7., 3., 7., 7., 7., 7.,\n 7., 3., 7., 7., 7., 7., 7., 3., 1., 3., 3., 7., 7.,\n 7., 7., 3., 7., 3., 7., 7., 7., 7., 3., 7., 7., 7.,\n 3., 7., 3., 7., 11., 7., 3., 3., 3., 7., 7., 7., 7.,\n 7., 7., 3., 7., 3., 7., 7., 7., 3., 3., 7., 7., 7.,\n 7., 7., 7., 7., 3., 7., 7., 7., 7., 3., 7., 3., 7.,\n 3., 7., 1., 7., 7., 7., 7., 7., 3., 3., 7., 7., 7.,\n 7., 7., 7., 7., 7., 7., 3., 7., 7., 3., 3., 7., 7.,\n 7., 7., 7., 7., 7., 3., 3., 
3., 3., 3., 7., 7., 7.,\n 3., 3., 3., 7., 7., 7., 7., 3., 7., 7., 7., 7., 7.,\n 3., 7., 7., 3., 3., 7., 7., 7., 3., 7., 7., 7., 7.,\n 7., 7., 3., 3., 7., 7.]]) process_time_diff
(chain, draw)
float64
0.009755 0.00941 ... 0.01065
array([[0.00975538, 0.00941049, 0.00999948, 0.00929125, 0.00551209,\n 0.00497061, 0.01069201, 0.00947549, 0.00529213, 0.00505167,\n 0.00984821, 0.00533182, 0.00487694, 0.00468126, 0.00564767,\n 0.00979147, 0.01083373, 0.01003699, 0.00967509, 0.01146689,\n 0.00942126, 0.00982687, 0.00454481, 0.01032535, 0.00454083,\n 0.00502061, 0.00939897, 0.00973176, 0.00472506, 0.00456349,\n 0.0096691 , 0.00899704, 0.01050028, 0.0096949 , 0.0026025 ,\n 0.00478206, 0.01015688, 0.00965208, 0.01483041, 0.00642694,\n 0.01256793, 0.00806139, 0.01231472, 0.00722911, 0.00502747,\n 0.00698748, 0.00920683, 0.01010853, 0.00467047, 0.0100342 ,\n 0.00972371, 0.00967817, 0.00444719, 0.00541691, 0.0047563 ,\n 0.00927449, 0.00956278, 0.00984005, 0.0100117 , 0.00521552,\n 0.00924579, 0.01027786, 0.00898144, 0.01019242, 0.00958831,\n 0.01951203, 0.0092896 , 0.00966934, 0.01965651, 0.00445681,\n 0.00973549, 0.00936654, 0.00959462, 0.00440857, 0.00995087,\n 0.00953933, 0.00498651, 0.00471925, 0.004487 , 0.00525815,\n 0.00912682, 0.00482729, 0.00957352, 0.01082284, 0.00959675,\n 0.01457986, 0.00747343, 0.01079252, 0.00820227, 0.01124633,\n 0.00920032, 0.01454033, 0.00963321, 0.01020733, 0.00991318,\n 0.00469721, 0.00978734, 0.00911167, 0.00997474, 0.00939938,\n...\n 0.00964861, 0.01001815, 0.01028483, 0.00919476, 0.01032207,\n 0.00454816, 0.01005422, 0.00461314, 0.01006749, 0.00981195,\n 0.01045004, 0.00468337, 0.00520851, 0.00974952, 0.01038776,\n 0.00938626, 0.01037468, 0.01033151, 0.00966043, 0.01050342,\n 0.00499815, 0.01135568, 0.01004927, 0.01054908, 0.01004009,\n 0.00483218, 0.00929484, 0.00509028, 0.00913833, 0.00544836,\n 0.00923033, 0.00277529, 0.0101487 , 0.0102663 , 0.01000788,\n 0.01060859, 0.01008415, 0.00522654, 0.00480347, 0.01026748,\n 0.00939217, 0.01065061, 0.01013464, 0.00938753, 0.01025269,\n 0.00995542, 0.00999043, 0.01006585, 0.00498049, 0.01021845,\n 0.00976333, 0.00513462, 0.00570519, 0.00957106, 0.0104617 ,\n 0.01003352, 0.00937745, 0.01090391, 0.00940274, 0.00993252,\n 
0.00453879, 0.00542772, 0.00481421, 0.00442143, 0.00501925,\n 0.00985907, 0.01075088, 0.00944716, 0.00600061, 0.00502537,\n 0.00456982, 0.01113173, 0.01117217, 0.00956422, 0.01081328,\n 0.00474575, 0.01055527, 0.00910964, 0.01060755, 0.00995624,\n 0.00950729, 0.00525596, 0.00992569, 0.01149059, 0.00492343,\n 0.00544062, 0.01633912, 0.0151078 , 0.01092321, 0.00469949,\n 0.011216 , 0.01079706, 0.00970234, 0.01065161, 0.01050351,\n 0.0098318 , 0.0051352 , 0.0051856 , 0.00974127, 0.01065099]]) lp
(chain, draw)
float64
-1.028e+03 -1.031e+03 ... -1.03e+03
array([[-1028.44792313, -1030.55058555, -1029.27880953, -1026.4846721 ,\n -1026.4846721 , -1027.2133356 , -1026.98417368, -1027.0566701 ,\n -1029.04960018, -1030.02151797, -1029.39371306, -1028.45915984,\n -1028.72114435, -1030.18286844, -1031.06185804, -1027.20815393,\n -1029.38135093, -1028.96655087, -1027.48812799, -1027.28501879,\n -1029.9879406 , -1028.46655521, -1029.64401871, -1028.56017099,\n -1030.63946885, -1029.133859 , -1026.56895409, -1026.92456989,\n -1027.07008787, -1027.22823264, -1027.5503443 , -1028.73411622,\n -1029.6978903 , -1029.61892582, -1031.11532831, -1027.18368535,\n -1026.89879765, -1026.67261427, -1028.19191836, -1029.37567521,\n -1026.72949297, -1028.37227749, -1027.46074665, -1028.17251719,\n -1027.62069027, -1028.66916935, -1028.90347981, -1027.78039164,\n -1026.90744579, -1032.90828344, -1028.90017206, -1028.22222644,\n -1026.83719496, -1026.51627042, -1027.27815871, -1026.96592321,\n -1026.82508346, -1026.8832871 , -1028.85256155, -1030.17173397,\n -1027.25642775, -1026.81266583, -1027.09359773, -1027.8634355 ,\n -1028.65999812, -1032.35655785, -1028.35549712, -1028.78254887,\n -1029.91404587, -1027.03704181, -1027.09292129, -1029.77003066,\n -1029.10701596, -1027.92540779, -1026.70219624, -1026.49495367,\n -1026.74976862, -1030.09062495, -1029.34225316, -1026.97482984,\n...\n -1026.60220666, -1026.72484541, -1027.37874886, -1027.11428203,\n -1027.27725101, -1028.68347241, -1030.26189979, -1027.46926754,\n -1028.01825385, -1028.88706011, -1027.75263725, -1027.75263725,\n -1027.00217267, -1029.40298515, -1026.91476434, -1027.54146858,\n -1026.73818187, -1027.89760233, -1027.00897166, -1026.95054961,\n -1027.00360884, -1027.05459025, -1026.61391619, -1028.74440697,\n -1027.74914552, -1029.95462576, -1026.9697908 , -1028.42528416,\n -1031.15176397, -1028.29529726, -1029.43889615, -1028.79020305,\n -1028.71492311, -1028.22247883, -1029.24680699, -1028.9662911 ,\n -1026.32690381, -1027.29813047, -1026.77013514, -1031.57788774,\n 
-1031.57788774, -1031.19664642, -1027.08611109, -1029.21475839,\n -1027.65294529, -1026.73610218, -1027.02025798, -1026.69064724,\n -1028.67616292, -1027.86978818, -1027.56427503, -1027.0177945 ,\n -1027.16101846, -1027.3070589 , -1027.65541954, -1027.60249901,\n -1026.67158508, -1026.61729356, -1027.44493041, -1027.1442318 ,\n -1027.42899565, -1030.23103077, -1027.41273073, -1027.71529007,\n -1029.09577564, -1027.81423159, -1026.59353155, -1027.92818409,\n -1028.05621991, -1027.23685229, -1030.88043968, -1028.64385962,\n -1030.19322284, -1027.71881078, -1027.18228597, -1028.51724272,\n -1027.6264824 , -1028.49398803, -1030.48218462, -1029.64527543]]) perf_counter_start
(chain, draw)
float64
2.438e+03 2.438e+03 ... 2.457e+03
array([[2437.72839844, 2437.73830966, 2437.74784665, 2437.75798448,\n 2437.76742039, 2437.7730837 , 2437.77812905, 2437.78892388,\n 2437.79853098, 2437.80397154, 2437.80916356, 2437.81913656,\n 2437.82460193, 2437.82961924, 2437.83441981, 2437.84022289,\n 2437.85015539, 2437.86114366, 2437.87133247, 2437.88115789,\n 2437.89274235, 2437.90232373, 2437.91229201, 2437.91696219,\n 2437.92740673, 2437.93204395, 2437.93719736, 2437.94673846,\n 2437.95660245, 2437.96144767, 2437.96611349, 2437.97591302,\n 2437.98499285, 2437.99561962, 2438.00545782, 2438.00819085,\n 2438.01309679, 2438.0232871 , 2438.03306379, 2438.04804214,\n 2438.05466035, 2438.06736928, 2438.07561754, 2438.08815703,\n 2438.09551576, 2438.10055803, 2438.1076419 , 2438.11698782,\n 2438.12721713, 2438.13193642, 2438.14204639, 2438.15187775,\n 2438.1617121 , 2438.16626786, 2438.17181467, 2438.17670198,\n 2438.18617547, 2438.19588357, 2438.20580543, 2438.21595146,\n 2438.22131054, 2438.23070624, 2438.24113716, 2438.25017302,\n 2438.26038296, 2438.27002174, 2438.28969726, 2438.29913043,\n 2438.30893827, 2438.32860658, 2438.33316515, 2438.34304904,\n 2438.35256009, 2438.36228182, 2438.3667999 , 2438.37677892,\n 2438.38645577, 2438.39157511, 2438.39641694, 2438.4010344 ,\n...\n 2456.6913688 , 2456.69643134, 2456.70791499, 2456.71803305,\n 2456.72867164, 2456.73884536, 2456.74380579, 2456.75326164,\n 2456.75848444, 2456.76778208, 2456.77335779, 2456.7827426 ,\n 2456.78572496, 2456.79602901, 2456.80627431, 2456.8164157 ,\n 2456.82709432, 2456.83727701, 2456.84263834, 2456.84758576,\n 2456.85801544, 2456.86755519, 2456.87835489, 2456.88863215,\n 2456.89816728, 2456.90856534, 2456.918734 , 2456.92889332,\n 2456.93892845, 2456.94399145, 2456.95428614, 2456.9640883 ,\n 2456.96928461, 2456.97510146, 2456.98484895, 2456.99544381,\n 2457.00561268, 2457.01507974, 2457.02611625, 2457.03573795,\n 2457.04581946, 2457.05046764, 2457.05604558, 2457.06098062,\n 2457.0654935 , 2457.07064707, 2457.08047569, 2457.09127935,\n 
2457.10087316, 2457.10702809, 2457.11218119, 2457.11688262,\n 2457.12801635, 2457.13928166, 2457.14899477, 2457.15994601,\n 2457.16479736, 2457.1755075 , 2457.18477025, 2457.19536141,\n 2457.20540168, 2457.21506093, 2457.22046465, 2457.23054986,\n 2457.24213328, 2457.24718705, 2457.2527705 , 2457.26930884,\n 2457.28464275, 2457.29575406, 2457.3005297 , 2457.31184162,\n 2457.32278012, 2457.33262456, 2457.34332777, 2457.35391363,\n 2457.36388773, 2457.36917453, 2457.37444699, 2457.38420804]]) index_in_trajectory
(chain, draw)
int64
3 1 3 -3 0 -2 -2 ... 4 -2 2 2 5 -2
array([[ 3, 1, 3, -3, 0, -2, -2, -7, -2, -1, 4, -2, 2, -1, 1, -2,\n -2, -2, -2, -1, -3, -1, 1, 4, -2, 1, 6, 7, -1, -2, 4, -4,\n 7, -2, 1, -2, -3, -3, 2, 1, -3, 2, 1, 2, 2, -3, 3, -4,\n -2, -1, 3, 2, 2, -2, 3, 1, -2, -4, 4, -3, 2, 4, -2, -6,\n -5, 3, 4, -5, 3, 1, -2, 6, -2, 2, 3, -1, 2, 2, 1, 3,\n 6, -3, 1, 7, 2, -5, -2, -3, 2, -1, -2, -5, -1, 4, 3, -2,\n 2, -2, -1, -1, -2, -3, 2, 5, 1, -5, 2, -2, -2, -5, 5, 4,\n -6, 5, -3, -2, -3, -2, 3, -1, -6, -3, 2, 4, 0, -3, 2, -5,\n -1, -4, 2, 1, 1, -3, -3, -2, -3, 4, 2, -2, -4, 4, -3, 0,\n -3, 3, -3, -2, -1, 4, 5, 7, 5, -1, -3, 1, 3, -3, 2, -3,\n 1, 2, 2, 1, -4, -2, -1, 1, -3, -2, 1, -3, 5, 3, -3, 2,\n -2, 3, -2, -4, 2, -3, -2, 3, -3, -1, -2, -2, 2, 1, 2, 2,\n 7, -2, 6, 3, 2, -1, -2, 2, 2, 1, 2, 7, 1, 5, 0, 0,\n 3, -1, 2, -3, 1, -6, -3, 3, -2, -4, -2, 7, -2, -2, -6, -1,\n 2, 2, 4, 1, -2, 2, 2, 3, -1, -3, 5, -3, -3, -2, -2, 2,\n -3, -1, -2, -7, 3, 3, 5, -3, -7, 2, 3, -1, -6, -1, -1, -4,\n 2, -3, 4, 1, 2, -3, -2, -5, -4, -1, 2, 3, 2, 4, 3, -3,\n -5, 3, -2, 2, -2, 4, 6, -5, 2, 1, 2, -2, -2, 6, 2, -1,\n -2, 0, 2, -1, -3, -1, 2, 3, 3, 1, -3, 5, -2, -2, -6, -2,\n 1, 2, -7, 1, -5, -2, -2, -3, 4, 2, -1, 2, 4, -3, 3, 1,\n...\n 2, -3, 2, -2, 3, 3, -3, 6, -1, -3, -1, -6, -5, -6, -1, 2,\n -2, -1, 1, 1, -2, -1, 1, 3, 3, -2, -1, 1, 2, -1, 5, 2,\n 4, 2, 3, 1, 4, 2, 2, -3, -2, -1, -2, -7, -1, -1, 6, -3,\n -3, -1, -1, 2, 6, 3, 2, 2, -2, 0, 2, 0, -2, -4, 1, 1,\n -2, 5, 5, 2, 3, 4, 2, 2, 5, 2, -3, 2, 2, 1, -2, -3,\n 5, -2, -7, -6, -2, -5, -7, -2, 2, -3, 3, 5, -2, 3, 2, 2,\n 2, 2, 2, 2, 1, 7, 3, 2, -1, -2, 1, 3, 1, 2, -3, -2,\n 6, 3, 2, -6, 1, 3, 2, 6, -1, -1, 6, -1, -2, -1, -3, -4,\n 3, 5, -2, 5, 3, 1, 3, 3, -3, 4, -1, 1, 1, 2, -3, -2,\n -3, -3, -4, 0, 4, -3, -7, 2, 5, 2, 4, -3, -3, -3, -4, -3,\n -2, -1, 3, 2, 2, -4, -5, 3, 1, -6, 6, -5, 3, 1, -3, -4,\n -3, 1, -1, -3, 0, -1, 0, 7, 2, -2, -4, -2, -3, 3, 2, 3,\n -5, -4, 2, -1, 3, -5, 1, -5, -1, 2, 5, -1, -1, 2, -2, -6,\n 3, 2, -3, 0, 5, -2, 3, 2, 3, 2, -4, 2, -3, 1, -3, 2,\n 1, -3, 4, -3, -1, 7, 
1, -3, -5, 2, -5, 2, -5, 2, -1, 0,\n -2, 5, -2, 4, -2, 3, 1, 2, -1, 4, 4, -1, -2, -2, 2, -2,\n 1, -5, -5, 3, -2, -3, -5, -2, 2, -6, 5, -5, 0, -1, -1, 3,\n 2, -2, 2, 6, -1, 2, 3, -2, 3, -2, -6, -2, -2, 7, 1, -4,\n -5, -1, 3, 1, 1, -1, 3, 5, -2, 2, 3, -4, 5, 2, 4, -2,\n 2, 2, 5, -2]]) reached_max_treedepth
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) step_size
(chain, draw)
float64
0.4627 0.4627 ... 0.7823 0.7823
array([[0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n 0.46273184, 0.46273184, 0.46273184, 0.46273184, 0.46273184,\n...\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 
0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609,\n 0.78234609, 0.78234609, 0.78234609, 0.78234609, 0.78234609]]) diverging
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) energy_error
(chain, draw)
float64
0.1802 0.7816 ... 0.8827 -0.4273
array([[ 1.80204382e-01, 7.81624139e-01, -6.83793119e-01,\n -1.07849224e+00, 0.00000000e+00, 1.60817965e-01,\n -6.74533351e-02, -4.60581358e-03, 1.62684187e-01,\n 3.93960887e-01, -4.19809964e-01, 1.73667836e-02,\n 2.00250525e-01, 2.36865183e-01, 2.39759965e-01,\n -7.06954014e-01, 7.83099737e-02, 2.82114173e-02,\n -2.04189185e-01, 1.10460943e-01, 1.26891423e-01,\n -2.08319770e-01, 1.25871408e-01, 1.79539350e-01,\n -3.71527977e-01, -8.75652821e-01, 1.48611961e-02,\n 3.03243226e-02, -8.18108726e-02, 3.24614731e-02,\n 7.70511835e-02, -2.33080278e-02, 4.38410129e-01,\n 1.05249092e-01, 4.30189207e-01, -7.19398189e-01,\n -2.16996214e-01, -1.64309401e-02, 1.09004669e-01,\n 3.52358006e-01, -6.38174984e-01, 5.84016940e-02,\n -4.72955173e-02, 1.34844002e-01, 2.34291367e-01,\n 2.98382073e-01, -5.65262600e-02, -2.06878262e-01,\n 2.29658862e-02, 1.76361842e+00, -1.64210135e+00,\n -1.19032870e-02, -3.03995798e-01, -4.02815884e-02,\n 3.31124285e-01, -3.44951415e-01, -8.05307858e-02,\n 2.18192684e-02, 6.74731397e-01, 2.13651208e-01,\n...\n 3.34787372e-02, 6.74026382e-02, 5.43131808e-01,\n 2.89243704e-02, 2.52130563e-01, -4.31120962e-01,\n 1.46766796e-01, 9.85083549e-01, -9.02103002e-01,\n 1.60964548e-01, 3.27270427e-01, -3.38925862e-01,\n -5.70057949e-01, 1.98575912e-01, -1.55075831e-01,\n -3.04005442e-01, 4.31113408e-01, -2.53690523e-01,\n 2.04120873e+00, 0.00000000e+00, -9.18338832e-02,\n -1.55293227e+00, 2.12217102e-01, -2.48152828e-01,\n -9.35971198e-02, 5.98155393e-02, -2.49179290e-02,\n 8.15901814e-01, -5.22584852e-01, -9.64993071e-02,\n -2.11310514e-01, -4.02904168e-02, 6.79200122e-02,\n 3.91358950e-02, -1.68652140e-01, -5.72945898e-02,\n -7.70522045e-03, 3.02061370e-01, -2.41078656e-01,\n -1.41269262e-02, 9.36326221e-01, -8.02963262e-01,\n 1.10547091e-01, -2.31385140e-02, -1.16056888e-01,\n -1.48097381e-01, 4.29125090e-01, 8.30058841e-02,\n -2.44901195e-01, 1.01138418e-01, -8.30262013e-02,\n 4.86895673e-02, -3.13606091e-01, 1.75760401e-01,\n 1.31526844e-01, -7.28989362e-02, 
-1.94735281e-02,\n 8.82658170e-01, -4.27293770e-01]]) largest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) tree_depth
(chain, draw)
int64
3 3 3 3 2 2 3 3 ... 3 3 3 3 2 2 3 3
array([[3, 3, 3, 3, 2, 2, 3, 3, 2, 2, 3, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3,\n 2, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 1, 2, 3, 3, 3, 2, 3, 2, 3, 2,\n 2, 2, 3, 3, 2, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 4,\n 3, 3, 4, 2, 3, 3, 3, 2, 3, 3, 2, 2, 2, 2, 3, 2, 3, 3, 3, 3, 2, 3,\n 2, 3, 3, 4, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 3,\n 4, 3, 3, 4, 3, 3, 3, 2, 2, 3, 3, 3, 2, 3, 1, 3, 3, 3, 2, 3, 3, 3,\n 2, 3, 2, 3, 2, 3, 3, 2, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 2, 3, 2, 2, 3, 2, 2, 2, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 3, 2, 2,\n 2, 3, 2, 3, 3, 3, 3, 3, 2, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 2,\n 2, 3, 3, 2, 3, 3, 3, 3, 2, 2, 2, 4, 2, 3, 1, 3, 3, 2, 2, 3, 3, 3,\n 3, 3, 3, 1, 3, 2, 3, 2, 3, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 2,\n 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 2, 3,\n 3, 2, 3, 3, 3, 4, 3, 2, 3, 3, 2, 2, 3, 3, 3, 3, 2, 2, 3, 2, 2, 3,\n 2, 2, 3, 2, 2, 3, 2, 3, 2, 2, 3, 2, 3, 3, 2, 3, 3, 3, 4, 3, 3, 3,\n 3, 3, 3, 3, 3, 2, 2, 3, 3, 2, 2, 3, 3, 3, 3, 1, 2, 3, 3, 3, 3, 2,\n 3, 2, 3, 2, 2, 3, 2, 3, 2, 3, 3, 3, 2, 2, 2, 3, 3, 2, 3, 3, 4, 3,\n 3, 3, 3, 4, 3, 3, 3, 3, 3, 2, 2, 2, 3, 2, 2, 3, 2, 3, 2, 3, 2, 3,\n 3, 3, 3, 3, 2, 2, 3, 3, 2, 2, 3, 2, 3, 2, 3, 2, 3, 1, 3, 2, 3, 4,\n 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 3,\n 2, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 2,\n...\n 2, 2, 3, 4, 3, 2, 3, 3, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 2, 2, 2, 3, 2, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 1, 3, 3,\n 3, 2, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 3, 3, 3, 3,\n 3, 1, 3, 3, 3, 3, 2, 3, 3, 3, 3, 2, 2, 3, 3, 2, 3, 2, 3, 3, 3, 2,\n 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3,\n 2, 2, 3, 1, 3, 2, 3, 2, 3, 3, 3, 3, 4, 3, 3, 2, 2, 3, 2, 2, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 2, 2, 2, 2, 3, 2, 2, 3,\n 3, 3, 3, 3, 3, 2, 2, 3, 3, 2, 3, 2, 2, 1, 3, 3, 3, 3, 3, 3, 3, 2,\n 2, 3, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 2,\n 3, 2, 3, 3, 3, 
3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 2, 3, 3, 3, 3, 2,\n 3, 3, 3, 2, 3, 3, 3, 3, 2, 3, 2, 2, 2, 2, 3, 3, 2, 3, 3, 3, 3, 3,\n 2, 3, 3, 3, 2, 2, 4, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3,\n 2, 2, 2, 2, 2, 2, 4, 2, 3, 2, 3, 3, 4, 2, 3, 3, 4, 3, 3, 2, 3, 3,\n 3, 4, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 1, 2,\n 2, 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 3, 2, 3, 2, 3, 4, 3,\n 2, 2, 2, 3, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 2, 2, 3, 3, 3, 3, 3,\n 3, 3, 2, 3, 3, 3, 3, 2, 3, 2, 3, 2, 3, 1, 3, 3, 3, 3, 3, 2, 2, 3,\n 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 3, 2, 2,\n 2, 2, 2, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 3,\n 2, 2, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3]]) max_energy_error
(chain, draw)
float64
-0.2293 0.7816 ... 1.199 -1.314
array([[-2.29333420e-01, 7.81624139e-01, -1.15743216e+00,\n -1.07849224e+00, 9.69908362e-01, 4.06772995e-01,\n 1.73181896e-01, -9.33782916e-02, 1.62684187e-01,\n 3.93960887e-01, -5.22644864e-01, -2.68300567e-01,\n 2.00250525e-01, 8.25883470e-01, 3.33535994e-01,\n -7.06954014e-01, 2.28638853e-01, 2.93949575e-01,\n -2.04189185e-01, 4.99671767e-01, -4.37310469e-01,\n -2.08319770e-01, 2.48708024e-01, 3.94991578e-01,\n 6.58322867e-01, -9.65849385e-01, 8.08448035e-01,\n 4.90972185e-01, -1.80694494e-01, 6.73487789e-01,\n 2.82466689e-01, -1.90355726e-01, 1.28685328e+00,\n 4.60476853e-01, 4.30189207e-01, -7.40726346e-01,\n -2.95136064e-01, 3.93527634e-01, 1.09004669e-01,\n 3.52358006e-01, 1.10842181e+00, 2.30106890e-01,\n 4.04202045e-01, 1.34844002e-01, 1.41822997e+00,\n 2.98382073e-01, -3.46242164e-01, -2.44014445e-01,\n 4.45894533e-01, 2.12570678e+00, -2.06322393e+00,\n 5.92000876e-01, -3.03995798e-01, 7.77633640e-02,\n 3.31124285e-01, 4.31522845e-01, 1.16949259e-01,\n -6.13892618e-02, 1.52595551e+00, -4.75663181e-01,\n...\n 2.05801989e-01, 3.11029895e-01, 1.14295155e+00,\n 8.72087769e-01, 4.26695705e-01, -5.43577275e-01,\n -2.03762817e-01, 9.85083549e-01, -1.41154611e+00,\n -3.23173841e-01, 3.77861864e-01, -5.58641585e-01,\n -7.30505486e-01, 6.18506718e-01, 4.87670917e-01,\n 3.35762213e-01, 5.16325367e-01, -3.87816685e-01,\n 3.42541908e+00, 1.84342609e+00, -1.47764891e+00,\n -1.55293227e+00, 7.47495296e-01, -2.71036415e-01,\n 4.52664104e-01, 2.90269998e-01, 3.13709629e-01,\n 8.15901814e-01, 6.73801990e-01, -2.75286771e-01,\n 8.05006987e-01, -1.00777474e-01, 1.01306168e-01,\n 2.99064802e-01, 3.25512735e-01, -8.42512367e-02,\n -6.67969497e-02, 4.33186017e-01, 6.08686751e-01,\n -1.50206324e-01, 9.97316525e-01, -1.03783894e+00,\n 5.47928185e-01, 7.59708619e-01, -2.36856330e-01,\n 2.39513377e-01, 4.42067199e-01, 1.39003089e+00,\n -3.04833946e-01, -2.37058489e-01, -4.44283876e-01,\n -3.78999833e-01, -3.54641194e-01, 4.83623938e-01,\n -2.13131354e-01, -4.04701860e-01, 
-2.90679666e-01,\n 1.19899648e+00, -1.31380609e+00]]) energy
(chain, draw)
float64
1.029e+03 1.032e+03 ... 1.032e+03
array([[1029.04886931, 1031.63136929, 1032.4858021 , 1029.31429622,\n 1031.50869785, 1027.70096919, 1028.09396571, 1027.1889641 ,\n 1029.43663632, 1030.47795134, 1033.14272545, 1029.56528295,\n 1029.24606379, 1031.87190849, 1032.41318701, 1032.68808749,\n 1030.70072234, 1033.00066073, 1032.62217925, 1030.28497956,\n 1031.25613017, 1034.20146854, 1030.26558736, 1031.11940779,\n 1033.05384894, 1030.13740763, 1037.30339204, 1028.83212852,\n 1027.34493601, 1028.73629257, 1028.19232321, 1029.32913708,\n 1032.14854876, 1033.62052297, 1031.52311481, 1031.32674082,\n 1027.27555011, 1028.81415399, 1028.78615431, 1029.67704206,\n 1030.87253736, 1028.91927676, 1030.39587248, 1028.46628795,\n 1030.58689951, 1029.19795648, 1029.58293808, 1031.77331565,\n 1028.44154086, 1034.13000331, 1032.39671078, 1030.39353892,\n 1028.51188184, 1027.16245447, 1027.37370304, 1028.48823034,\n 1027.76299646, 1027.31915194, 1031.27929138, 1030.56790129,\n 1031.1058217 , 1028.08611409, 1027.73779123, 1028.1600516 ,\n 1029.50097289, 1034.22329359, 1034.2422822 , 1029.3307873 ,\n 1030.61062917, 1031.77336912, 1027.34212041, 1031.48630071,\n 1031.21740859, 1029.77991942, 1030.79130167, 1026.86878229,\n 1028.01617498, 1031.0263761 , 1033.18039426, 1029.95689692,\n...\n 1027.38034192, 1028.4474338 , 1028.30363522, 1028.49006425,\n 1028.99136587, 1028.79933778, 1031.94823236, 1030.85626158,\n 1029.09994061, 1031.06991462, 1030.47796542, 1029.12576297,\n 1028.09443606, 1033.01707732, 1029.70255676, 1029.81998562,\n 1027.92238627, 1029.6600547 , 1029.91227304, 1027.77898767,\n 1027.42489614, 1029.45963106, 1028.16864752, 1031.70115078,\n 1031.35415798, 1032.16760299, 1030.90218981, 1028.7775468 ,\n 1032.02518368, 1031.24857048, 1031.35141114, 1031.34092621,\n 1030.18276405, 1030.48633917, 1031.84709035, 1030.63885756,\n 1030.30800231, 1027.82629973, 1027.3297388 , 1034.14339947,\n 1034.9440532 , 1033.3431128 , 1030.28327141, 1031.45430623,\n 1029.45184501, 1028.61143482, 1027.501959 , 1028.99540959,\n 
1028.85609954, 1030.82375151, 1028.73779168, 1029.70266204,\n 1027.9247215 , 1028.85928826, 1030.20890628, 1028.98399327,\n 1028.39224647, 1026.87437545, 1028.42890403, 1029.74331932,\n 1028.01019631, 1031.22344153, 1030.6482975 , 1029.43626541,\n 1032.78240887, 1029.30491476, 1029.37850254, 1028.41564606,\n 1031.12956601, 1028.38274182, 1031.80310406, 1031.41714189,\n 1032.13536675, 1029.95517596, 1029.25550656, 1029.45766465,\n 1028.49740007, 1028.66905342, 1032.41823478, 1032.46584166]]) Attributes: (8)
created_at : 2026-01-08T04:55:33.411973+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 30.849383115768433 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n observed_data \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 12kB\nDimensions: (__obs__: 500, rt,response_extra_dim_0: 2)\nCoordinates:\n * __obs__ (__obs__) int64 4kB 0 1 2 3 4 ... 496 497 498 499\n * rt,response_extra_dim_0 (rt,response_extra_dim_0) int64 16B 0 1\nData variables:\n rt,response (__obs__, rt,response_extra_dim_0) float64 8kB 5...\nAttributes:\n created_at: 2026-01-08T04:55:33.418223+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions: __obs__ : 500rt,response_extra_dim_0 : 2
Coordinates: (2)
Data variables: (1)
rt,response
(__obs__, rt,response_extra_dim_0)
float64
5.251 -1.0 0.817 ... -1.0 2.098 1.0
array([[ 5.25085068, -1. ],\n [ 0.81702983, 1. ],\n [ 1.22963595, 1. ],\n [ 2.08696556, 1. ],\n [ 2.89212894, 1. ],\n [ 3.54065871, 1. ],\n [ 1.72898936, 1. ],\n [ 2.21090579, 1. ],\n [ 4.0863018 , -1. ],\n [ 8.18515587, -1. ],\n [ 1.17861474, 1. ],\n [ 1.71078253, 1. ],\n [ 1.11688149, 1. ],\n [ 1.92637146, 1. ],\n [ 4.49467134, 1. ],\n [ 1.7099334 , 1. ],\n [ 2.4099648 , 1. ],\n [ 1.55375707, 1. ],\n [ 3.20066214, -1. ],\n [ 1.17386627, 1. ],\n...\n [ 3.52440763, 1. ],\n [ 1.30275202, 1. ],\n [ 5.3520937 , 1. ],\n [ 0.9726209 , 1. ],\n [ 1.31584668, 1. ],\n [ 0.95979559, 1. ],\n [ 1.75311542, 1. ],\n [ 5.5283246 , -1. ],\n [ 3.77018714, -1. ],\n [ 9.15060711, 1. ],\n [ 3.17011929, 1. ],\n [ 1.92992651, 1. ],\n [ 1.81051934, 1. ],\n [ 1.4278065 , 1. ],\n [ 1.83704746, 1. ],\n [ 1.17173886, 1. ],\n [ 1.67033231, 1. ],\n [ 3.16141605, 1. ],\n [ 3.40105796, -1. ],\n [ 2.09810948, 1. ]]) Attributes: (6)
created_at : 2026-01-08T04:55:33.418223+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n
\n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "fwwy",
+ "code_hash": "6dfef4c4e137cd8647a7c8d5d5429347",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 36kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 7 ... 493 494 495 496 497 498 499\nData variables:\n t (chain, draw) float64 8kB 0.4858 0.4874 0.5649 ... 0.5131 0.5023\n a (chain, draw) float64 8kB 1.42 1.396 1.432 ... 1.459 1.465 1.531\n v (chain, draw) float64 8kB 0.46 0.4994 0.4201 ... 0.4756 0.4639\n z (chain, draw) float64 8kB 0.4921 0.4953 0.5207 ... 0.5448 0.5019\nAttributes:\n created_at: 2026-01-08T04:55:33.399374+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 30.849383115768433\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (4)
t
(chain, draw)
float64
0.4858 0.4874 ... 0.5131 0.5023
array([[0.48575898, 0.4874169 , 0.56489413, 0.52327668, 0.52327668,\n 0.51735751, 0.53034364, 0.47649765, 0.46284268, 0.46520331,\n 0.46083669, 0.4593662 , 0.48406028, 0.4526651 , 0.46001507,\n 0.48118006, 0.5294147 , 0.53076833, 0.4787791 , 0.47996894,\n 0.44641416, 0.48250367, 0.48269964, 0.49808578, 0.57851296,\n 0.55575702, 0.49335675, 0.53381615, 0.52763723, 0.51874272,\n 0.48449161, 0.54815292, 0.45062441, 0.521377 , 0.50926011,\n 0.52282439, 0.53104155, 0.50582164, 0.46798903, 0.47273825,\n 0.51961557, 0.55223779, 0.52318457, 0.54208213, 0.51812459,\n 0.55992107, 0.48963462, 0.54657144, 0.51707202, 0.48923537,\n 0.53118936, 0.50839344, 0.50624422, 0.51701415, 0.50237139,\n 0.52909628, 0.50828604, 0.52146805, 0.53679188, 0.4696402 ,\n 0.54550002, 0.49338531, 0.48342327, 0.55110842, 0.50910738,\n 0.51598703, 0.49536839, 0.46054526, 0.54135241, 0.53620886,\n 0.50751537, 0.49578939, 0.51854171, 0.53459467, 0.53698073,\n 0.52076779, 0.53930056, 0.55693755, 0.56330514, 0.54042356,\n 0.49135896, 0.46826939, 0.47642666, 0.52091203, 0.51972218,\n 0.49114814, 0.50305294, 0.49013528, 0.48449883, 0.48801788,\n 0.44691148, 0.55053182, 0.54364458, 0.50496596, 0.53677642,\n 0.53519451, 0.53467746, 0.49012529, 0.50976122, 0.51972499,\n...\n 0.53135932, 0.49476602, 0.49237559, 0.49237559, 0.52153435,\n 0.51780916, 0.45114472, 0.43669586, 0.5219717 , 0.49255034,\n 0.54366199, 0.50351056, 0.55812408, 0.49702379, 0.49229607,\n 0.54182283, 0.56007076, 0.51446918, 0.51673506, 0.5027071 ,\n 0.51314394, 0.51399041, 0.4969603 , 0.54656141, 0.50973111,\n 0.46628013, 0.50426948, 0.51560854, 0.49978386, 0.45752611,\n 0.47419317, 0.47419317, 0.49069551, 0.51973564, 0.49829212,\n 0.49473534, 0.50672342, 0.49824202, 0.48064452, 0.50357108,\n 0.49601765, 0.5384329 , 0.51425332, 0.4635438 , 0.5117676 ,\n 0.50766191, 0.52603337, 0.52351951, 0.5366462 , 0.49393139,\n 0.54399711, 0.50216277, 0.48067998, 0.51274 , 0.44683778,\n 0.4511459 , 0.51588204, 0.49525355, 0.52425554, 0.47243496,\n 
0.47243496, 0.47541677, 0.48109169, 0.49133874, 0.47860567,\n 0.5068819 , 0.47844727, 0.52531496, 0.50954969, 0.5266407 ,\n 0.48567178, 0.52013718, 0.49702878, 0.5152404 , 0.50095719,\n 0.4890485 , 0.50336455, 0.50587956, 0.503944 , 0.47849903,\n 0.53080546, 0.53078259, 0.50781999, 0.50148963, 0.51480523,\n 0.50365531, 0.49776146, 0.52533235, 0.53007223, 0.4927408 ,\n 0.46641809, 0.46647075, 0.48059084, 0.46988261, 0.53398489,\n 0.48715129, 0.50732104, 0.47348156, 0.51305504, 0.50231702]]) a
(chain, draw)
float64
1.42 1.396 1.432 ... 1.465 1.531
array([[1.41962593, 1.39567367, 1.4317506 , 1.43815873, 1.43815873,\n 1.41506752, 1.42489165, 1.47228951, 1.54019871, 1.54261446,\n 1.50883799, 1.48778168, 1.48907592, 1.43981795, 1.47780259,\n 1.47182224, 1.49534181, 1.37663927, 1.49219117, 1.45348848,\n 1.53877615, 1.48388185, 1.50187359, 1.42217578, 1.38681059,\n 1.37511451, 1.47061828, 1.4336922 , 1.42147773, 1.41324983,\n 1.49542278, 1.38724289, 1.52539289, 1.49257124, 1.49170351,\n 1.45692004, 1.4366145 , 1.48474581, 1.52332005, 1.54322473,\n 1.4600666 , 1.41252005, 1.44291269, 1.45527941, 1.42233897,\n 1.43794569, 1.47904204, 1.43858699, 1.44538457, 1.38315857,\n 1.47805613, 1.43128946, 1.47748292, 1.47310777, 1.43448457,\n 1.45652247, 1.48214165, 1.41785655, 1.4974154 , 1.42919286,\n 1.44088427, 1.45988243, 1.47393757, 1.4156567 , 1.5180166 ,\n 1.40384394, 1.42316929, 1.45782434, 1.50473414, 1.44310825,\n 1.4585378 , 1.46519248, 1.44046144, 1.4418534 , 1.44324006,\n 1.44996868, 1.4305769 , 1.36119743, 1.42224077, 1.44935953,\n 1.49600976, 1.4710327 , 1.47940687, 1.41755625, 1.45677903,\n 1.41673891, 1.47461421, 1.45417031, 1.53012483, 1.51558344,\n 1.52305116, 1.43019528, 1.39317838, 1.49107451, 1.3956755 ,\n 1.42923222, 1.44233663, 1.43354637, 1.45811866, 1.41322255,\n...\n 1.47569475, 1.49894881, 1.454072 , 1.454072 , 1.43673935,\n 1.42883534, 1.52023953, 1.4998433 , 1.49035233, 1.43324679,\n 1.4293716 , 1.4480808 , 1.40817894, 1.38344763, 1.48465827,\n 1.42550672, 1.41047898, 1.4757535 , 1.4400986 , 1.45069438,\n 1.47423521, 1.45967282, 1.47093848, 1.42788083, 1.47629967,\n 1.48285612, 1.38494346, 1.46100182, 1.51436122, 1.46489208,\n 1.50264806, 1.50264806, 1.4876187 , 1.4092414 , 1.44732079,\n 1.42765811, 1.46670338, 1.48656924, 1.4691816 , 1.46190357,\n 1.46920654, 1.41735254, 1.45906975, 1.46405789, 1.46577055,\n 1.39656671, 1.45102417, 1.38562744, 1.40765999, 1.48777242,\n 1.3922839 , 1.44482542, 1.51418906, 1.4185966 , 1.51692311,\n 1.52406142, 1.45007398, 1.46300545, 1.42843102, 1.46536687,\n 
1.46536687, 1.46885152, 1.46241395, 1.5309264 , 1.51293759,\n 1.48528343, 1.46810474, 1.46595447, 1.45276806, 1.46347675,\n 1.4390223 , 1.41264463, 1.48994292, 1.40924903, 1.46437362,\n 1.48909828, 1.4463778 , 1.44434882, 1.46632659, 1.46442781,\n 1.41721859, 1.43887618, 1.45153336, 1.45764906, 1.47446183,\n 1.45247501, 1.4773564 , 1.41632352, 1.48490392, 1.44130061,\n 1.54995306, 1.46375494, 1.53193968, 1.4798981 , 1.46266293,\n 1.43188296, 1.46957178, 1.45873085, 1.4645994 , 1.53145956]]) v
(chain, draw)
float64
0.46 0.4994 ... 0.4756 0.4639
array([[0.46002474, 0.49935788, 0.42010292, 0.47832265, 0.47832265,\n 0.49531407, 0.42805387, 0.47274173, 0.49211158, 0.46531771,\n 0.52405831, 0.52751463, 0.48996999, 0.48740123, 0.45327257,\n 0.44433403, 0.3999858 , 0.4400614 , 0.49898925, 0.46203263,\n 0.51281462, 0.54555939, 0.58259382, 0.40303473, 0.44202404,\n 0.44628367, 0.47327411, 0.44126281, 0.42563734, 0.44816899,\n 0.51424922, 0.40406922, 0.50454619, 0.54675165, 0.56177533,\n 0.42184901, 0.44984197, 0.45478304, 0.48355605, 0.47857872,\n 0.42284614, 0.42280862, 0.39300878, 0.44165036, 0.40855478,\n 0.46295939, 0.38347846, 0.43803047, 0.41221236, 0.40121386,\n 0.48724416, 0.41713424, 0.4894201 , 0.45898768, 0.45573498,\n 0.49215874, 0.45280984, 0.4394082 , 0.47264357, 0.41767965,\n 0.42896376, 0.4754212 , 0.49607791, 0.42347567, 0.51022987,\n 0.34891115, 0.49754998, 0.46131279, 0.45161885, 0.4376886 ,\n 0.46499653, 0.47842048, 0.54809135, 0.51327392, 0.44520869,\n 0.44395821, 0.4539666 , 0.44784109, 0.41385033, 0.44637958,\n 0.49903689, 0.4313524 , 0.46596975, 0.42145104, 0.42400477,\n 0.45833683, 0.44103262, 0.47911584, 0.5030067 , 0.54724429,\n 0.47889834, 0.50877738, 0.46336246, 0.3988057 , 0.47521016,\n 0.53218054, 0.45433204, 0.46957592, 0.44154727, 0.42166304,\n...\n 0.46086418, 0.45642178, 0.43570236, 0.43570236, 0.4829974 ,\n 0.4666672 , 0.52365082, 0.54384614, 0.433968 , 0.39185751,\n 0.49576688, 0.44625795, 0.47557563, 0.43994571, 0.54460588,\n 0.46881728, 0.44246676, 0.43203313, 0.47421233, 0.45738979,\n 0.44339128, 0.49097843, 0.50331195, 0.44334794, 0.44424603,\n 0.44985518, 0.47824584, 0.48500045, 0.45922198, 0.44563862,\n 0.48540394, 0.48540394, 0.47877128, 0.41778203, 0.5043077 ,\n 0.44035812, 0.47148907, 0.51071731, 0.49095185, 0.52204284,\n 0.51396949, 0.47694901, 0.44565396, 0.43047348, 0.40925179,\n 0.54186569, 0.43236364, 0.46116241, 0.42952852, 0.40603855,\n 0.41185788, 0.52819373, 0.46235274, 0.52754265, 0.47926358,\n 0.45898006, 0.48723861, 0.4859017 , 0.46183195, 0.53758073,\n 
0.53758073, 0.52305545, 0.49480701, 0.48161108, 0.46251392,\n 0.48742003, 0.46839586, 0.47882352, 0.50716931, 0.51746354,\n 0.452746 , 0.4437606 , 0.51680848, 0.46444835, 0.40887173,\n 0.42780076, 0.469227 , 0.43542738, 0.46011848, 0.47629926,\n 0.4202373 , 0.36867242, 0.52693297, 0.47675539, 0.56824748,\n 0.54620153, 0.46019097, 0.48401945, 0.47754063, 0.45489857,\n 0.56569335, 0.43165606, 0.57073731, 0.51990311, 0.46522088,\n 0.53405837, 0.49894122, 0.54383762, 0.47557582, 0.46393061]]) z
(chain, draw)
float64
0.4921 0.4953 ... 0.5448 0.5019
array([[0.49214773, 0.49526222, 0.52065169, 0.50104238, 0.50104238,\n 0.50071145, 0.50939918, 0.50037135, 0.49347186, 0.49116017,\n 0.46882245, 0.49284441, 0.47529894, 0.47943944, 0.46911097,\n 0.5110609 , 0.54573222, 0.50257665, 0.48516217, 0.50222268,\n 0.47693099, 0.4725742 , 0.48062739, 0.51638469, 0.51507879,\n 0.52305171, 0.49708241, 0.50711891, 0.50880663, 0.49950493,\n 0.5061342 , 0.53070657, 0.47466182, 0.51307581, 0.51866077,\n 0.50979223, 0.52732014, 0.51605704, 0.5082604 , 0.49617651,\n 0.51909792, 0.53961015, 0.52655521, 0.5407605 , 0.50878472,\n 0.53094502, 0.51847335, 0.53677114, 0.5182387 , 0.51909091,\n 0.53485403, 0.49622814, 0.51531898, 0.51205144, 0.51825837,\n 0.51595277, 0.52232993, 0.50775073, 0.52043147, 0.50780626,\n 0.52627376, 0.49105109, 0.50583662, 0.51454489, 0.51166629,\n 0.54910688, 0.5072023 , 0.50350706, 0.52234158, 0.51050429,\n 0.52470601, 0.46930605, 0.50928572, 0.49088719, 0.52006544,\n 0.52272162, 0.51856312, 0.52149496, 0.5162203 , 0.52257794,\n 0.51128888, 0.50527449, 0.49711225, 0.5059195 , 0.52861927,\n 0.50761143, 0.5138448 , 0.50312694, 0.49573275, 0.48988484,\n 0.48831786, 0.51759673, 0.52700703, 0.53236432, 0.49631835,\n 0.51226661, 0.51636912, 0.49635375, 0.4924294 , 0.49782483,\n...\n 0.52986081, 0.51755342, 0.50477203, 0.50477203, 0.51480378,\n 0.49696597, 0.49560143, 0.47340147, 0.52754865, 0.51601404,\n 0.52566238, 0.50630206, 0.50571067, 0.51737786, 0.49776678,\n 0.50774332, 0.5138328 , 0.5173622 , 0.50385519, 0.52668574,\n 0.51967973, 0.49394986, 0.48228597, 0.52173284, 0.5007935 ,\n 0.51963708, 0.50798951, 0.48752222, 0.50583894, 0.49544127,\n 0.48805774, 0.48805774, 0.51450044, 0.49002662, 0.50186736,\n 0.50550457, 0.49502883, 0.51691578, 0.48990291, 0.49806719,\n 0.48693259, 0.50484327, 0.50447346, 0.50352883, 0.50816364,\n 0.48608033, 0.50795832, 0.50649649, 0.48120236, 0.5095809 ,\n 0.50024496, 0.51276875, 0.49020348, 0.48025969, 0.50855525,\n 0.5057506 , 0.50356763, 0.51698105, 0.51985403, 0.51618664,\n 
0.51618664, 0.52155587, 0.49933288, 0.52577785, 0.50430775,\n 0.51066993, 0.49952365, 0.51502769, 0.52448468, 0.491171 ,\n 0.49573328, 0.51049909, 0.49443511, 0.49829613, 0.50818895,\n 0.52376491, 0.49390018, 0.51250314, 0.52820319, 0.49180201,\n 0.5296624 , 0.50902894, 0.50374124, 0.48193598, 0.48672799,\n 0.49286581, 0.51510466, 0.52152346, 0.5045378 , 0.51264841,\n 0.48071807, 0.49692682, 0.48801087, 0.48245312, 0.50786497,\n 0.48692662, 0.48323057, 0.48457479, 0.54476633, 0.5018969 ]]) Attributes: (8)
created_at : 2026-01-08T04:55:33.399374+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 30.849383115768433 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "LJZf",
+ "code_hash": "951eddecca5029c3c95bb7fa16dc64f9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.DataArray 'a' (chain: 2, draw: 5)> Size: 80B\narray([[1.41962593, 1.39567367, 1.4317506 , 1.43815873, 1.43815873],\n [1.49850733, 1.46560166, 1.45534524, 1.3908701 , 1.49430976]])\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 40B 0 1 2 3 4 "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "jxvo",
+ "code_hash": "2a2f7ab87f0f5fdbe6e00a9916195dcb",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "<class 'numpy.ndarray'> "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "mWxS",
+ "code_hash": "efe5b9b6b5e607baadafd94a3add68e9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "YWSi",
+ "code_hash": "1bb0635197876b993e437259b446d9d7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 56kB\nDimensions: (sample: 1000)\nCoordinates:\n * sample (sample) object 8kB MultiIndex\n * chain (sample) int64 8kB 0 0 0 0 0 0 0 0 0 0 0 ... 1 1 1 1 1 1 1 1 1 1 1\n * draw (sample) int64 8kB 0 1 2 3 4 5 6 7 ... 493 494 495 496 497 498 499\nData variables:\n t (sample) float64 8kB 0.4858 0.4874 0.5649 ... 0.4735 0.5131 0.5023\n a (sample) float64 8kB 1.42 1.396 1.432 1.438 ... 1.459 1.465 1.531\n v (sample) float64 8kB 0.46 0.4994 0.4201 ... 0.5438 0.4756 0.4639\n z (sample) float64 8kB 0.4921 0.4953 0.5207 ... 0.4846 0.5448 0.5019\nAttributes:\n created_at: 2026-01-08T04:55:33.399374+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 30.849383115768433\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (3)
Data variables: (4)
t
(sample)
float64
0.4858 0.4874 ... 0.5131 0.5023
array([0.48575898, 0.4874169 , 0.56489413, 0.52327668, 0.52327668,\n 0.51735751, 0.53034364, 0.47649765, 0.46284268, 0.46520331,\n 0.46083669, 0.4593662 , 0.48406028, 0.4526651 , 0.46001507,\n 0.48118006, 0.5294147 , 0.53076833, 0.4787791 , 0.47996894,\n 0.44641416, 0.48250367, 0.48269964, 0.49808578, 0.57851296,\n 0.55575702, 0.49335675, 0.53381615, 0.52763723, 0.51874272,\n 0.48449161, 0.54815292, 0.45062441, 0.521377 , 0.50926011,\n 0.52282439, 0.53104155, 0.50582164, 0.46798903, 0.47273825,\n 0.51961557, 0.55223779, 0.52318457, 0.54208213, 0.51812459,\n 0.55992107, 0.48963462, 0.54657144, 0.51707202, 0.48923537,\n 0.53118936, 0.50839344, 0.50624422, 0.51701415, 0.50237139,\n 0.52909628, 0.50828604, 0.52146805, 0.53679188, 0.4696402 ,\n 0.54550002, 0.49338531, 0.48342327, 0.55110842, 0.50910738,\n 0.51598703, 0.49536839, 0.46054526, 0.54135241, 0.53620886,\n 0.50751537, 0.49578939, 0.51854171, 0.53459467, 0.53698073,\n 0.52076779, 0.53930056, 0.55693755, 0.56330514, 0.54042356,\n 0.49135896, 0.46826939, 0.47642666, 0.52091203, 0.51972218,\n 0.49114814, 0.50305294, 0.49013528, 0.48449883, 0.48801788,\n 0.44691148, 0.55053182, 0.54364458, 0.50496596, 0.53677642,\n 0.53519451, 0.53467746, 0.49012529, 0.50976122, 0.51972499,\n...\n 0.53135932, 0.49476602, 0.49237559, 0.49237559, 0.52153435,\n 0.51780916, 0.45114472, 0.43669586, 0.5219717 , 0.49255034,\n 0.54366199, 0.50351056, 0.55812408, 0.49702379, 0.49229607,\n 0.54182283, 0.56007076, 0.51446918, 0.51673506, 0.5027071 ,\n 0.51314394, 0.51399041, 0.4969603 , 0.54656141, 0.50973111,\n 0.46628013, 0.50426948, 0.51560854, 0.49978386, 0.45752611,\n 0.47419317, 0.47419317, 0.49069551, 0.51973564, 0.49829212,\n 0.49473534, 0.50672342, 0.49824202, 0.48064452, 0.50357108,\n 0.49601765, 0.5384329 , 0.51425332, 0.4635438 , 0.5117676 ,\n 0.50766191, 0.52603337, 0.52351951, 0.5366462 , 0.49393139,\n 0.54399711, 0.50216277, 0.48067998, 0.51274 , 0.44683778,\n 0.4511459 , 0.51588204, 0.49525355, 0.52425554, 0.47243496,\n 
0.47243496, 0.47541677, 0.48109169, 0.49133874, 0.47860567,\n 0.5068819 , 0.47844727, 0.52531496, 0.50954969, 0.5266407 ,\n 0.48567178, 0.52013718, 0.49702878, 0.5152404 , 0.50095719,\n 0.4890485 , 0.50336455, 0.50587956, 0.503944 , 0.47849903,\n 0.53080546, 0.53078259, 0.50781999, 0.50148963, 0.51480523,\n 0.50365531, 0.49776146, 0.52533235, 0.53007223, 0.4927408 ,\n 0.46641809, 0.46647075, 0.48059084, 0.46988261, 0.53398489,\n 0.48715129, 0.50732104, 0.47348156, 0.51305504, 0.50231702]) a
(sample)
float64
1.42 1.396 1.432 ... 1.465 1.531
array([1.41962593, 1.39567367, 1.4317506 , 1.43815873, 1.43815873,\n 1.41506752, 1.42489165, 1.47228951, 1.54019871, 1.54261446,\n 1.50883799, 1.48778168, 1.48907592, 1.43981795, 1.47780259,\n 1.47182224, 1.49534181, 1.37663927, 1.49219117, 1.45348848,\n 1.53877615, 1.48388185, 1.50187359, 1.42217578, 1.38681059,\n 1.37511451, 1.47061828, 1.4336922 , 1.42147773, 1.41324983,\n 1.49542278, 1.38724289, 1.52539289, 1.49257124, 1.49170351,\n 1.45692004, 1.4366145 , 1.48474581, 1.52332005, 1.54322473,\n 1.4600666 , 1.41252005, 1.44291269, 1.45527941, 1.42233897,\n 1.43794569, 1.47904204, 1.43858699, 1.44538457, 1.38315857,\n 1.47805613, 1.43128946, 1.47748292, 1.47310777, 1.43448457,\n 1.45652247, 1.48214165, 1.41785655, 1.4974154 , 1.42919286,\n 1.44088427, 1.45988243, 1.47393757, 1.4156567 , 1.5180166 ,\n 1.40384394, 1.42316929, 1.45782434, 1.50473414, 1.44310825,\n 1.4585378 , 1.46519248, 1.44046144, 1.4418534 , 1.44324006,\n 1.44996868, 1.4305769 , 1.36119743, 1.42224077, 1.44935953,\n 1.49600976, 1.4710327 , 1.47940687, 1.41755625, 1.45677903,\n 1.41673891, 1.47461421, 1.45417031, 1.53012483, 1.51558344,\n 1.52305116, 1.43019528, 1.39317838, 1.49107451, 1.3956755 ,\n 1.42923222, 1.44233663, 1.43354637, 1.45811866, 1.41322255,\n...\n 1.47569475, 1.49894881, 1.454072 , 1.454072 , 1.43673935,\n 1.42883534, 1.52023953, 1.4998433 , 1.49035233, 1.43324679,\n 1.4293716 , 1.4480808 , 1.40817894, 1.38344763, 1.48465827,\n 1.42550672, 1.41047898, 1.4757535 , 1.4400986 , 1.45069438,\n 1.47423521, 1.45967282, 1.47093848, 1.42788083, 1.47629967,\n 1.48285612, 1.38494346, 1.46100182, 1.51436122, 1.46489208,\n 1.50264806, 1.50264806, 1.4876187 , 1.4092414 , 1.44732079,\n 1.42765811, 1.46670338, 1.48656924, 1.4691816 , 1.46190357,\n 1.46920654, 1.41735254, 1.45906975, 1.46405789, 1.46577055,\n 1.39656671, 1.45102417, 1.38562744, 1.40765999, 1.48777242,\n 1.3922839 , 1.44482542, 1.51418906, 1.4185966 , 1.51692311,\n 1.52406142, 1.45007398, 1.46300545, 1.42843102, 1.46536687,\n 
1.46536687, 1.46885152, 1.46241395, 1.5309264 , 1.51293759,\n 1.48528343, 1.46810474, 1.46595447, 1.45276806, 1.46347675,\n 1.4390223 , 1.41264463, 1.48994292, 1.40924903, 1.46437362,\n 1.48909828, 1.4463778 , 1.44434882, 1.46632659, 1.46442781,\n 1.41721859, 1.43887618, 1.45153336, 1.45764906, 1.47446183,\n 1.45247501, 1.4773564 , 1.41632352, 1.48490392, 1.44130061,\n 1.54995306, 1.46375494, 1.53193968, 1.4798981 , 1.46266293,\n 1.43188296, 1.46957178, 1.45873085, 1.4645994 , 1.53145956]) v
(sample)
float64
0.46 0.4994 ... 0.4756 0.4639
array([0.46002474, 0.49935788, 0.42010292, 0.47832265, 0.47832265,\n 0.49531407, 0.42805387, 0.47274173, 0.49211158, 0.46531771,\n 0.52405831, 0.52751463, 0.48996999, 0.48740123, 0.45327257,\n 0.44433403, 0.3999858 , 0.4400614 , 0.49898925, 0.46203263,\n 0.51281462, 0.54555939, 0.58259382, 0.40303473, 0.44202404,\n 0.44628367, 0.47327411, 0.44126281, 0.42563734, 0.44816899,\n 0.51424922, 0.40406922, 0.50454619, 0.54675165, 0.56177533,\n 0.42184901, 0.44984197, 0.45478304, 0.48355605, 0.47857872,\n 0.42284614, 0.42280862, 0.39300878, 0.44165036, 0.40855478,\n 0.46295939, 0.38347846, 0.43803047, 0.41221236, 0.40121386,\n 0.48724416, 0.41713424, 0.4894201 , 0.45898768, 0.45573498,\n 0.49215874, 0.45280984, 0.4394082 , 0.47264357, 0.41767965,\n 0.42896376, 0.4754212 , 0.49607791, 0.42347567, 0.51022987,\n 0.34891115, 0.49754998, 0.46131279, 0.45161885, 0.4376886 ,\n 0.46499653, 0.47842048, 0.54809135, 0.51327392, 0.44520869,\n 0.44395821, 0.4539666 , 0.44784109, 0.41385033, 0.44637958,\n 0.49903689, 0.4313524 , 0.46596975, 0.42145104, 0.42400477,\n 0.45833683, 0.44103262, 0.47911584, 0.5030067 , 0.54724429,\n 0.47889834, 0.50877738, 0.46336246, 0.3988057 , 0.47521016,\n 0.53218054, 0.45433204, 0.46957592, 0.44154727, 0.42166304,\n...\n 0.46086418, 0.45642178, 0.43570236, 0.43570236, 0.4829974 ,\n 0.4666672 , 0.52365082, 0.54384614, 0.433968 , 0.39185751,\n 0.49576688, 0.44625795, 0.47557563, 0.43994571, 0.54460588,\n 0.46881728, 0.44246676, 0.43203313, 0.47421233, 0.45738979,\n 0.44339128, 0.49097843, 0.50331195, 0.44334794, 0.44424603,\n 0.44985518, 0.47824584, 0.48500045, 0.45922198, 0.44563862,\n 0.48540394, 0.48540394, 0.47877128, 0.41778203, 0.5043077 ,\n 0.44035812, 0.47148907, 0.51071731, 0.49095185, 0.52204284,\n 0.51396949, 0.47694901, 0.44565396, 0.43047348, 0.40925179,\n 0.54186569, 0.43236364, 0.46116241, 0.42952852, 0.40603855,\n 0.41185788, 0.52819373, 0.46235274, 0.52754265, 0.47926358,\n 0.45898006, 0.48723861, 0.4859017 , 0.46183195, 0.53758073,\n 
0.53758073, 0.52305545, 0.49480701, 0.48161108, 0.46251392,\n 0.48742003, 0.46839586, 0.47882352, 0.50716931, 0.51746354,\n 0.452746 , 0.4437606 , 0.51680848, 0.46444835, 0.40887173,\n 0.42780076, 0.469227 , 0.43542738, 0.46011848, 0.47629926,\n 0.4202373 , 0.36867242, 0.52693297, 0.47675539, 0.56824748,\n 0.54620153, 0.46019097, 0.48401945, 0.47754063, 0.45489857,\n 0.56569335, 0.43165606, 0.57073731, 0.51990311, 0.46522088,\n 0.53405837, 0.49894122, 0.54383762, 0.47557582, 0.46393061]) z
(sample)
float64
0.4921 0.4953 ... 0.5448 0.5019
array([0.49214773, 0.49526222, 0.52065169, 0.50104238, 0.50104238,\n 0.50071145, 0.50939918, 0.50037135, 0.49347186, 0.49116017,\n 0.46882245, 0.49284441, 0.47529894, 0.47943944, 0.46911097,\n 0.5110609 , 0.54573222, 0.50257665, 0.48516217, 0.50222268,\n 0.47693099, 0.4725742 , 0.48062739, 0.51638469, 0.51507879,\n 0.52305171, 0.49708241, 0.50711891, 0.50880663, 0.49950493,\n 0.5061342 , 0.53070657, 0.47466182, 0.51307581, 0.51866077,\n 0.50979223, 0.52732014, 0.51605704, 0.5082604 , 0.49617651,\n 0.51909792, 0.53961015, 0.52655521, 0.5407605 , 0.50878472,\n 0.53094502, 0.51847335, 0.53677114, 0.5182387 , 0.51909091,\n 0.53485403, 0.49622814, 0.51531898, 0.51205144, 0.51825837,\n 0.51595277, 0.52232993, 0.50775073, 0.52043147, 0.50780626,\n 0.52627376, 0.49105109, 0.50583662, 0.51454489, 0.51166629,\n 0.54910688, 0.5072023 , 0.50350706, 0.52234158, 0.51050429,\n 0.52470601, 0.46930605, 0.50928572, 0.49088719, 0.52006544,\n 0.52272162, 0.51856312, 0.52149496, 0.5162203 , 0.52257794,\n 0.51128888, 0.50527449, 0.49711225, 0.5059195 , 0.52861927,\n 0.50761143, 0.5138448 , 0.50312694, 0.49573275, 0.48988484,\n 0.48831786, 0.51759673, 0.52700703, 0.53236432, 0.49631835,\n 0.51226661, 0.51636912, 0.49635375, 0.4924294 , 0.49782483,\n...\n 0.52986081, 0.51755342, 0.50477203, 0.50477203, 0.51480378,\n 0.49696597, 0.49560143, 0.47340147, 0.52754865, 0.51601404,\n 0.52566238, 0.50630206, 0.50571067, 0.51737786, 0.49776678,\n 0.50774332, 0.5138328 , 0.5173622 , 0.50385519, 0.52668574,\n 0.51967973, 0.49394986, 0.48228597, 0.52173284, 0.5007935 ,\n 0.51963708, 0.50798951, 0.48752222, 0.50583894, 0.49544127,\n 0.48805774, 0.48805774, 0.51450044, 0.49002662, 0.50186736,\n 0.50550457, 0.49502883, 0.51691578, 0.48990291, 0.49806719,\n 0.48693259, 0.50484327, 0.50447346, 0.50352883, 0.50816364,\n 0.48608033, 0.50795832, 0.50649649, 0.48120236, 0.5095809 ,\n 0.50024496, 0.51276875, 0.49020348, 0.48025969, 0.50855525,\n 0.5057506 , 0.50356763, 0.51698105, 0.51985403, 0.51618664,\n 
0.51618664, 0.52155587, 0.49933288, 0.52577785, 0.50430775,\n 0.51066993, 0.49952365, 0.51502769, 0.52448468, 0.491171 ,\n 0.49573328, 0.51049909, 0.49443511, 0.49829613, 0.50818895,\n 0.52376491, 0.49390018, 0.51250314, 0.52820319, 0.49180201,\n 0.5296624 , 0.50902894, 0.50374124, 0.48193598, 0.48672799,\n 0.49286581, 0.51510466, 0.52152346, 0.5045378 , 0.51264841,\n 0.48071807, 0.49692682, 0.48801087, 0.48245312, 0.50786497,\n 0.48692662, 0.48323057, 0.48457479, 0.54476633, 0.5018969 ]) Attributes: (8)
created_at : 2026-01-08T04:55:33.399374+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 30.849383115768433 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "tZnO",
+ "code_hash": "81738950ca38d6577095620f3f0c23fc",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 56kB\nDimensions: (sample: 1000)\nCoordinates:\n * sample (sample) object 8kB MultiIndex\n * chain (sample) int64 8kB 0 0 0 0 0 0 0 0 0 0 0 ... 1 1 1 1 1 1 1 1 1 1 1\n * draw (sample) int64 8kB 0 1 2 3 4 5 6 7 ... 493 494 495 496 497 498 499\nData variables:\n t (sample) float64 8kB 0.4858 0.4874 0.5649 ... 0.4735 0.5131 0.5023\n a (sample) float64 8kB 1.42 1.396 1.432 1.438 ... 1.459 1.465 1.531\n v (sample) float64 8kB 0.46 0.4994 0.4201 ... 0.5438 0.4756 0.4639\n z (sample) float64 8kB 0.4921 0.4953 0.5207 ... 0.4846 0.5448 0.5019\nAttributes:\n created_at: 2026-01-08T04:55:33.399374+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 30.849383115768433\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (3)
Data variables: (4)
t
(sample)
float64
0.4858 0.4874 ... 0.5131 0.5023
array([0.48575898, 0.4874169 , 0.56489413, 0.52327668, 0.52327668,\n 0.51735751, 0.53034364, 0.47649765, 0.46284268, 0.46520331,\n 0.46083669, 0.4593662 , 0.48406028, 0.4526651 , 0.46001507,\n 0.48118006, 0.5294147 , 0.53076833, 0.4787791 , 0.47996894,\n 0.44641416, 0.48250367, 0.48269964, 0.49808578, 0.57851296,\n 0.55575702, 0.49335675, 0.53381615, 0.52763723, 0.51874272,\n 0.48449161, 0.54815292, 0.45062441, 0.521377 , 0.50926011,\n 0.52282439, 0.53104155, 0.50582164, 0.46798903, 0.47273825,\n 0.51961557, 0.55223779, 0.52318457, 0.54208213, 0.51812459,\n 0.55992107, 0.48963462, 0.54657144, 0.51707202, 0.48923537,\n 0.53118936, 0.50839344, 0.50624422, 0.51701415, 0.50237139,\n 0.52909628, 0.50828604, 0.52146805, 0.53679188, 0.4696402 ,\n 0.54550002, 0.49338531, 0.48342327, 0.55110842, 0.50910738,\n 0.51598703, 0.49536839, 0.46054526, 0.54135241, 0.53620886,\n 0.50751537, 0.49578939, 0.51854171, 0.53459467, 0.53698073,\n 0.52076779, 0.53930056, 0.55693755, 0.56330514, 0.54042356,\n 0.49135896, 0.46826939, 0.47642666, 0.52091203, 0.51972218,\n 0.49114814, 0.50305294, 0.49013528, 0.48449883, 0.48801788,\n 0.44691148, 0.55053182, 0.54364458, 0.50496596, 0.53677642,\n 0.53519451, 0.53467746, 0.49012529, 0.50976122, 0.51972499,\n...\n 0.53135932, 0.49476602, 0.49237559, 0.49237559, 0.52153435,\n 0.51780916, 0.45114472, 0.43669586, 0.5219717 , 0.49255034,\n 0.54366199, 0.50351056, 0.55812408, 0.49702379, 0.49229607,\n 0.54182283, 0.56007076, 0.51446918, 0.51673506, 0.5027071 ,\n 0.51314394, 0.51399041, 0.4969603 , 0.54656141, 0.50973111,\n 0.46628013, 0.50426948, 0.51560854, 0.49978386, 0.45752611,\n 0.47419317, 0.47419317, 0.49069551, 0.51973564, 0.49829212,\n 0.49473534, 0.50672342, 0.49824202, 0.48064452, 0.50357108,\n 0.49601765, 0.5384329 , 0.51425332, 0.4635438 , 0.5117676 ,\n 0.50766191, 0.52603337, 0.52351951, 0.5366462 , 0.49393139,\n 0.54399711, 0.50216277, 0.48067998, 0.51274 , 0.44683778,\n 0.4511459 , 0.51588204, 0.49525355, 0.52425554, 0.47243496,\n 
0.47243496, 0.47541677, 0.48109169, 0.49133874, 0.47860567,\n 0.5068819 , 0.47844727, 0.52531496, 0.50954969, 0.5266407 ,\n 0.48567178, 0.52013718, 0.49702878, 0.5152404 , 0.50095719,\n 0.4890485 , 0.50336455, 0.50587956, 0.503944 , 0.47849903,\n 0.53080546, 0.53078259, 0.50781999, 0.50148963, 0.51480523,\n 0.50365531, 0.49776146, 0.52533235, 0.53007223, 0.4927408 ,\n 0.46641809, 0.46647075, 0.48059084, 0.46988261, 0.53398489,\n 0.48715129, 0.50732104, 0.47348156, 0.51305504, 0.50231702]) a
(sample)
float64
1.42 1.396 1.432 ... 1.465 1.531
array([1.41962593, 1.39567367, 1.4317506 , 1.43815873, 1.43815873,\n 1.41506752, 1.42489165, 1.47228951, 1.54019871, 1.54261446,\n 1.50883799, 1.48778168, 1.48907592, 1.43981795, 1.47780259,\n 1.47182224, 1.49534181, 1.37663927, 1.49219117, 1.45348848,\n 1.53877615, 1.48388185, 1.50187359, 1.42217578, 1.38681059,\n 1.37511451, 1.47061828, 1.4336922 , 1.42147773, 1.41324983,\n 1.49542278, 1.38724289, 1.52539289, 1.49257124, 1.49170351,\n 1.45692004, 1.4366145 , 1.48474581, 1.52332005, 1.54322473,\n 1.4600666 , 1.41252005, 1.44291269, 1.45527941, 1.42233897,\n 1.43794569, 1.47904204, 1.43858699, 1.44538457, 1.38315857,\n 1.47805613, 1.43128946, 1.47748292, 1.47310777, 1.43448457,\n 1.45652247, 1.48214165, 1.41785655, 1.4974154 , 1.42919286,\n 1.44088427, 1.45988243, 1.47393757, 1.4156567 , 1.5180166 ,\n 1.40384394, 1.42316929, 1.45782434, 1.50473414, 1.44310825,\n 1.4585378 , 1.46519248, 1.44046144, 1.4418534 , 1.44324006,\n 1.44996868, 1.4305769 , 1.36119743, 1.42224077, 1.44935953,\n 1.49600976, 1.4710327 , 1.47940687, 1.41755625, 1.45677903,\n 1.41673891, 1.47461421, 1.45417031, 1.53012483, 1.51558344,\n 1.52305116, 1.43019528, 1.39317838, 1.49107451, 1.3956755 ,\n 1.42923222, 1.44233663, 1.43354637, 1.45811866, 1.41322255,\n...\n 1.47569475, 1.49894881, 1.454072 , 1.454072 , 1.43673935,\n 1.42883534, 1.52023953, 1.4998433 , 1.49035233, 1.43324679,\n 1.4293716 , 1.4480808 , 1.40817894, 1.38344763, 1.48465827,\n 1.42550672, 1.41047898, 1.4757535 , 1.4400986 , 1.45069438,\n 1.47423521, 1.45967282, 1.47093848, 1.42788083, 1.47629967,\n 1.48285612, 1.38494346, 1.46100182, 1.51436122, 1.46489208,\n 1.50264806, 1.50264806, 1.4876187 , 1.4092414 , 1.44732079,\n 1.42765811, 1.46670338, 1.48656924, 1.4691816 , 1.46190357,\n 1.46920654, 1.41735254, 1.45906975, 1.46405789, 1.46577055,\n 1.39656671, 1.45102417, 1.38562744, 1.40765999, 1.48777242,\n 1.3922839 , 1.44482542, 1.51418906, 1.4185966 , 1.51692311,\n 1.52406142, 1.45007398, 1.46300545, 1.42843102, 1.46536687,\n 
1.46536687, 1.46885152, 1.46241395, 1.5309264 , 1.51293759,\n 1.48528343, 1.46810474, 1.46595447, 1.45276806, 1.46347675,\n 1.4390223 , 1.41264463, 1.48994292, 1.40924903, 1.46437362,\n 1.48909828, 1.4463778 , 1.44434882, 1.46632659, 1.46442781,\n 1.41721859, 1.43887618, 1.45153336, 1.45764906, 1.47446183,\n 1.45247501, 1.4773564 , 1.41632352, 1.48490392, 1.44130061,\n 1.54995306, 1.46375494, 1.53193968, 1.4798981 , 1.46266293,\n 1.43188296, 1.46957178, 1.45873085, 1.4645994 , 1.53145956]) v
(sample)
float64
0.46 0.4994 ... 0.4756 0.4639
array([0.46002474, 0.49935788, 0.42010292, 0.47832265, 0.47832265,\n 0.49531407, 0.42805387, 0.47274173, 0.49211158, 0.46531771,\n 0.52405831, 0.52751463, 0.48996999, 0.48740123, 0.45327257,\n 0.44433403, 0.3999858 , 0.4400614 , 0.49898925, 0.46203263,\n 0.51281462, 0.54555939, 0.58259382, 0.40303473, 0.44202404,\n 0.44628367, 0.47327411, 0.44126281, 0.42563734, 0.44816899,\n 0.51424922, 0.40406922, 0.50454619, 0.54675165, 0.56177533,\n 0.42184901, 0.44984197, 0.45478304, 0.48355605, 0.47857872,\n 0.42284614, 0.42280862, 0.39300878, 0.44165036, 0.40855478,\n 0.46295939, 0.38347846, 0.43803047, 0.41221236, 0.40121386,\n 0.48724416, 0.41713424, 0.4894201 , 0.45898768, 0.45573498,\n 0.49215874, 0.45280984, 0.4394082 , 0.47264357, 0.41767965,\n 0.42896376, 0.4754212 , 0.49607791, 0.42347567, 0.51022987,\n 0.34891115, 0.49754998, 0.46131279, 0.45161885, 0.4376886 ,\n 0.46499653, 0.47842048, 0.54809135, 0.51327392, 0.44520869,\n 0.44395821, 0.4539666 , 0.44784109, 0.41385033, 0.44637958,\n 0.49903689, 0.4313524 , 0.46596975, 0.42145104, 0.42400477,\n 0.45833683, 0.44103262, 0.47911584, 0.5030067 , 0.54724429,\n 0.47889834, 0.50877738, 0.46336246, 0.3988057 , 0.47521016,\n 0.53218054, 0.45433204, 0.46957592, 0.44154727, 0.42166304,\n...\n 0.46086418, 0.45642178, 0.43570236, 0.43570236, 0.4829974 ,\n 0.4666672 , 0.52365082, 0.54384614, 0.433968 , 0.39185751,\n 0.49576688, 0.44625795, 0.47557563, 0.43994571, 0.54460588,\n 0.46881728, 0.44246676, 0.43203313, 0.47421233, 0.45738979,\n 0.44339128, 0.49097843, 0.50331195, 0.44334794, 0.44424603,\n 0.44985518, 0.47824584, 0.48500045, 0.45922198, 0.44563862,\n 0.48540394, 0.48540394, 0.47877128, 0.41778203, 0.5043077 ,\n 0.44035812, 0.47148907, 0.51071731, 0.49095185, 0.52204284,\n 0.51396949, 0.47694901, 0.44565396, 0.43047348, 0.40925179,\n 0.54186569, 0.43236364, 0.46116241, 0.42952852, 0.40603855,\n 0.41185788, 0.52819373, 0.46235274, 0.52754265, 0.47926358,\n 0.45898006, 0.48723861, 0.4859017 , 0.46183195, 0.53758073,\n 
0.53758073, 0.52305545, 0.49480701, 0.48161108, 0.46251392,\n 0.48742003, 0.46839586, 0.47882352, 0.50716931, 0.51746354,\n 0.452746 , 0.4437606 , 0.51680848, 0.46444835, 0.40887173,\n 0.42780076, 0.469227 , 0.43542738, 0.46011848, 0.47629926,\n 0.4202373 , 0.36867242, 0.52693297, 0.47675539, 0.56824748,\n 0.54620153, 0.46019097, 0.48401945, 0.47754063, 0.45489857,\n 0.56569335, 0.43165606, 0.57073731, 0.51990311, 0.46522088,\n 0.53405837, 0.49894122, 0.54383762, 0.47557582, 0.46393061]) z
(sample)
float64
0.4921 0.4953 ... 0.5448 0.5019
array([0.49214773, 0.49526222, 0.52065169, 0.50104238, 0.50104238,\n 0.50071145, 0.50939918, 0.50037135, 0.49347186, 0.49116017,\n 0.46882245, 0.49284441, 0.47529894, 0.47943944, 0.46911097,\n 0.5110609 , 0.54573222, 0.50257665, 0.48516217, 0.50222268,\n 0.47693099, 0.4725742 , 0.48062739, 0.51638469, 0.51507879,\n 0.52305171, 0.49708241, 0.50711891, 0.50880663, 0.49950493,\n 0.5061342 , 0.53070657, 0.47466182, 0.51307581, 0.51866077,\n 0.50979223, 0.52732014, 0.51605704, 0.5082604 , 0.49617651,\n 0.51909792, 0.53961015, 0.52655521, 0.5407605 , 0.50878472,\n 0.53094502, 0.51847335, 0.53677114, 0.5182387 , 0.51909091,\n 0.53485403, 0.49622814, 0.51531898, 0.51205144, 0.51825837,\n 0.51595277, 0.52232993, 0.50775073, 0.52043147, 0.50780626,\n 0.52627376, 0.49105109, 0.50583662, 0.51454489, 0.51166629,\n 0.54910688, 0.5072023 , 0.50350706, 0.52234158, 0.51050429,\n 0.52470601, 0.46930605, 0.50928572, 0.49088719, 0.52006544,\n 0.52272162, 0.51856312, 0.52149496, 0.5162203 , 0.52257794,\n 0.51128888, 0.50527449, 0.49711225, 0.5059195 , 0.52861927,\n 0.50761143, 0.5138448 , 0.50312694, 0.49573275, 0.48988484,\n 0.48831786, 0.51759673, 0.52700703, 0.53236432, 0.49631835,\n 0.51226661, 0.51636912, 0.49635375, 0.4924294 , 0.49782483,\n...\n 0.52986081, 0.51755342, 0.50477203, 0.50477203, 0.51480378,\n 0.49696597, 0.49560143, 0.47340147, 0.52754865, 0.51601404,\n 0.52566238, 0.50630206, 0.50571067, 0.51737786, 0.49776678,\n 0.50774332, 0.5138328 , 0.5173622 , 0.50385519, 0.52668574,\n 0.51967973, 0.49394986, 0.48228597, 0.52173284, 0.5007935 ,\n 0.51963708, 0.50798951, 0.48752222, 0.50583894, 0.49544127,\n 0.48805774, 0.48805774, 0.51450044, 0.49002662, 0.50186736,\n 0.50550457, 0.49502883, 0.51691578, 0.48990291, 0.49806719,\n 0.48693259, 0.50484327, 0.50447346, 0.50352883, 0.50816364,\n 0.48608033, 0.50795832, 0.50649649, 0.48120236, 0.5095809 ,\n 0.50024496, 0.51276875, 0.49020348, 0.48025969, 0.50855525,\n 0.5057506 , 0.50356763, 0.51698105, 0.51985403, 0.51618664,\n 
0.51618664, 0.52155587, 0.49933288, 0.52577785, 0.50430775,\n 0.51066993, 0.49952365, 0.51502769, 0.52448468, 0.491171 ,\n 0.49573328, 0.51049909, 0.49443511, 0.49829613, 0.50818895,\n 0.52376491, 0.49390018, 0.51250314, 0.52820319, 0.49180201,\n 0.5296624 , 0.50902894, 0.50374124, 0.48193598, 0.48672799,\n 0.49286581, 0.51510466, 0.52152346, 0.5045378 , 0.51264841,\n 0.48071807, 0.49692682, 0.48801087, 0.48245312, 0.50786497,\n 0.48692662, 0.48323057, 0.48457479, 0.54476633, 0.5018969 ]) Attributes: (8)
created_at : 2026-01-08T04:55:33.399374+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 30.849383115768433 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "CLip",
+ "code_hash": "d4bef0a0e0ab47d186f5daddf5090311",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "iXej",
+ "code_hash": "cea6cff9fcb97bb61a9fe87dafac4136",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "UmEG",
+ "code_hash": "4ce037f032ffd2fbc6d83f012c29d93e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IpqN",
+ "code_hash": "ece8ee4092a1e62ff45fa6d4ddea76fa",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "dlnW",
+ "code_hash": "86d097df4bb8fc71e738c0a6a3ad02bd",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "RKFZ",
+ "code_hash": "de964d1e39e02f7d66ea3cfd8b724cf0",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IWgg",
+ "code_hash": "45f1fd310a291c447df837b24d5c4d41",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "LkGn",
+ "code_hash": "47a838cfd5962ac8ef30dd5da4a78c4c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "HnMC",
+ "code_hash": "d09102f45050087f0419495714baca8c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "hgqU",
+ "code_hash": "7c2aec14da23e91ab964c6ad4346857b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "streamMedia",
+ "name": "media",
+ "mimetype": "application/vnd.marimo+mimebundle",
+ "data": "{\"image/png\": \"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAA+4AAANlCAYAAADik6OlAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAewgAAHsIBbtB1PgAA7lVJREFUeJzs3Xd0VNXax/HfmfRK6L2HEFCa9N4UFURAsaOgYLk29KJ4RV+Ea8GGBcv1Iki5iqKooAYFxdBrIFKkhF5CDRAS0st5/xgyJKRMEpLMCXw/a81aJ3P22efJzGQyz+zn7G2YpmkKAAAAAABYks3VAQAAAAAAgPyRuAMAAAAAYGEk7gAAAAAAWBiJOwAAAAAAFkbiDgAAAACAhZG4AwAAAABgYSTuAAAAAABYGIk7AAAAAAAWRuIOAAAAAICFkbgDAAAAAGBhJO4AAAAAAFgYiTsAAAAAABZG4g4AAAAAgIWRuAMAAAAAYGEk7gAAAAAAWBiJOwAAAAAAFkbiDgAAAACAhZG4AwAAAABgYSTuAAAAAABYGIk7gHKrQYMGMgxDhmHowIEDrg4H5czMmTMdr58RI0a4OpwC9erVyxHr0qVLXR0OLKg8vZ4BAEVH4g4UIPuH5bxuAQEBatCggQYNGqSPPvpI586dc3XIuEIlJydrwYIFevLJJ9WuXTvVrVtXPj4+8vPzU+3atdWjRw/985//1O+//67MzExXhwtctgMHDuR6z61WrZrS09ML3UdGRoZq1qyZqx++6AMAlDck7sBlOH/+vA4ePKiffvpJTz/9tOrVq6fZs2e7OqxCy/7BuEGDBq4OB3lIT0/Xf/7zHzVu3FiDBw/WJ598oo0bN+rIkSNKTk5WYmKijh49qhUrVuj9999Xv379VK9ePX3yySdFSnBQdhg9L75Tp07p119/LXT7RYsW6fjx46UYUcF4jwUAlBR3VwcAlBft27dXhw4dHD+bpqnY2Fht2LBBu3fvliTFxcVp+PDhSk5O1iOPPOKqUHGFOHv2rG6//XaFh4fnuL969epq27atqlatKpvNpuPHj2v79u06ePCgJCk6OlpPPvmk/v77b3366aeuCB0oNbNnz9bAgQML3RYAgCsBiTtQSP3799eECRPy3Pfjjz/qwQcfdJTKP/300+rfv7/q1KlThhFefa7kctfY2Fh16dJFO3fudNx3880365VXXlGHDh1kGEauY7Zu3arp06frs88+U0pKihITE8syZJQiRual5s2ba/v27fr5558VGxuroKCgAtufO3dOCxYsyHHslWzEiBFc2w4AVzBK5YESMGTIEH311VeOn1NSUhjpRLGZpqnhw4c7knabzaZPPvlECxcuVMeOHfNM2iWpRYsW+uCDDxQVFaUePXqUZchAqbv//vsl2d9f586d67T9t99+q+TkZEnSAw88UKqxAQBQ2kjcgRIyYMAAtWrVyvHzH3/84cJoUJ599dVX+umnnxw/T5o0SY8//nihj69Xr56WLFmi4cOHl0Z4gEvce++9cne3FwoWpgQ+q42Hh4fuvffeUo0NAIDSRuIOlKAuXbo4tvft25dvu0WLFumhhx5SSEiIAgMD5ePjo/r162vIkCGaOXOm0tLSCnW+tLQ0ffnll7rtttvUqFEj+fv7y93dXQEBAQoODtaNN96o8ePHa/369TmOy1o2qGHDho77Dh48mO/s+QXZsWOHxo0bpw4dOqh69ery9PRU1apV1bFjR40fP15Hjx51+nvkNVnXsWPH9MYbb6hDhw6qUaOG3NzccpXGFnU5uIMHD2r8+PHq1KmTI9bq1aurU6dOeuWVV3T48GGnfSxdutRxzl69ejnuX7hwoe655x41adJE/v7+MgxDH3zwgdP+LmWapt566y3Hz+3atdNzzz1X5H7c3d3Vu3fvAtvExMTozTffVM+ePVWzZk15eXmpSpUqatOmjZ5//
vlClRbnN/nWypUrNWrUKIWGhqpChQoyDEPPPPOMY39er6/Nmzdr9OjRuvbaa1WpUiUZhqHBgwfned7Tp09r8uTJuuGGG1S3bl15e3srKChIzZs31xNPPKGIiAinsRfFxo0bNWnSJN1yyy2Ov7Ws10+XLl300ksv6dChQwX2kfX7Llu2zHFf79698/ybmzlzZo5jizqhXWk+txERERo1apRCQkLk6+urihUrqkOHDnrjjTeUkJDgtN/iqlatmm666SZJ0urVq7V379582+7fv1+rVq2SJN10002qWrVqoc+TlJSk+fPn6+mnn1a3bt0c7xX+/v5q0KCBhgwZounTpys1NTXfPi7nPba47zHOloObN2+eY7+7u7tWr15d4OOQmpqqtm3bOo655ZZbCn7gAAClywSQr549e5qSTEnmK6+84rT9uHHjHO09PDxy7T9x4oTZt29fR5v8bk2aNDE3bNhQ4Ll27dplNmvWzGlfWbfdu3c7jp0xY0ahj8vvbSI5Odl89NFHTTc3twKP9fHxMT/66KNCP87h4eHm/PnzzYoVK+bqq0KFCjmOq1+/vmPf/v37CzzHa6+9Znp7excYq7e3t/nmm28W2E94eLijfc+ePc3Y2FhzyJAhefb3/vvvF9hXXpYvX56jjy+//LLIfRTG9OnTzQoVKhT4eLi5uZnPPPOMmZ6enm8/+/fvd7SvX7++mZKSYj766KN59jd69GjHcZe+vl555ZU8X0uDBg3Kdc6PP/7YaeyGYZgPPfSQmZKSkm/s2f8Ohg8fnm+79u3bF+rvxMPDw3zrrbfy7acof3MzZszIceylfyMFKa3nNjMz0xw/frxps9ny7bdhw4bm3r17C4yvsLKfX5KZlJRkfvvtt46fx48fn++xEyZMcLT77rvvzKSkpBx95fd+sXbtWtPf379Qz1GDBg3MTZs25dnP5bzHFvc9pjCv54ceeijHc3Xu3Ll8H8PnnnvO0bZ69ermyZMn820LACh9TE4HlKCzZ886titUqJBj34kTJ9S1a9cco0SNGzdWx44d5eXlpe3bt2vdunWSpN27d6t379767bff1LVr11zniY+P1/XXX+8YIbbZbGrTpo2aNWsmf39/JSYmKjo6Wps3b1ZMTEyu45s1a6YnnnhC8fHxjnLSgICAQl8HmpCQoBtvvNExopX1u7Rt21YVK1bUmTNntGrVKh09elRJSUl66qmnFBcXp3Hjxjnte/Xq1ZowYYLS0tJUuXJl9ejRQ1WqVNHJkycVGRlZqPgu9eSTT+qTTz5x/Ozv76/evXurRo0aOn78uMLDw3X+/HklJyfrX//6l44fP67333/fab+maWrYsGH65ZdfZBiG2rVrp+bNm8s0TW3bts1ptUJe/vzzT8e2p6enbr/99iL34cy7776r559/3vGzl5eXevbsqXr16uns2bMKDw/XmTNnlJGRoQ8++ECHDh1yjNY58+yzz+q///2vJPs1961atZKHh4eioqJks+Vd5PXOO+9o4sSJkuyvow4dOsjX11cHDhyQh4dHjrbPPPOMPvzwQ8fPVapUUefOnVWjRg0lJycrMjJS27Ztk2ma+uKLL3T06FGFhYXle+7CyBpJ9/Ly0jXXXKPg4GBVqFBBpmnq2LFjWrdunWJiYpSWlqYXXnhBkjR27Nhc/TzxxBOS7JNZZlWiDB48WLVr187VtlmzZsWKtTSf24kTJ+rf//63JKl169Zq0aKFPDw89Ndff2nTpk2S7CPdgwcP1qZNmxxl7SXp1ltvVVBQkGJjY/Xll19qwoQJecae9b5WsWJFDRw4UKZpFqr/s2fP6vz585LsI/zXXHON6tSpIz8/PyUmJmrPnj1av3690tPTdeDAAfXs2VObNm1ScHBwjn4u9z02S0m/x0yZMkUrVqzQ7t27tX//fj3++OP68ssvc7X7448/NHnyZElyVIAUpWoBAFAKXPilAWB5RR1xb9mypaN9+/btc+y7+eabHfv8/PzMr7/+OtfxG
zZsMBs1auRoV7duXfPs2bO52n3wwQeONs2bNzd37tyZZzyZmZnm+vXrzX/84x/moUOHcu2/dEStsB544AHHcSEhIXmOAKanp5uffvqp6eXl5RjhW716dZ79ZX+c3d3dTcMwzFdffdVMTU3N0S45OTnHz4UZcZ87d26OEaoRI0bkGmU6d+6cOWzYsBztvv/++zz7yz4a5u7ubkoyW7RoYW7ZsiVX20vjLYzsFRmXvoZKwqpVq3KMbN98883m8ePHc7RJTk42n3/++RyPx+TJk/PsL/trKKvfunXrmsuXL8/VNvvjkb1vd3d3s0KFCuaPP/5Y4DHTp093HBMYGGh+/vnnuV4jpmmaf/75p1m7dm1H2/xGwQs74v6Pf/zDDAsLMxMTE/Pcn56ebs6YMcP08/NzjLzv27cv3/6KMnpelGNK87n19PQ0DcMwGzdubK5bty5X22+//db08PBwtJ81a1ahfq+C5DXibpqm+cgjjzjuy+t1tmLFCsf+Rx991DRNs0gj7uPGjTO3bt2ab1wnTpww77//fkdfffv2LdTvUNj32OK+xxT29bxhw4Ycz9WlVT0xMTFmrVq1HPuffvrpQsUNAChdJO5AAYqSuP/yyy85Phj+61//cuz7888/c+z75Zdf8u1n//79OcpcJ06cmKvN7bff7tj/+++/F/v3K86Hyuyl3I0bNzZPnTpVYPvsHyZvuummPNtkf5wlma+99lqhYnGWuGdkZJgNGzZ0tLnjjjvMzMzMPPvKzMw0Bw0alON3y8jIyNUu+4dqSWaNGjWcPgZF0bhx4xxfMpS0Hj16OPrv0qVLgaXkTz/9dI5EOS4uLlebS5MrX19fc9euXU7jyH6MzWYzly1bVmD7uLg4MygoyJFErl27tsD227dvd1waUblyZTMhISFXm8ImOoX1zTffOPobO3Zsvu1KK3Ev7ee2cuXKZnR0dL59Zi+tzu9vvSjyS9xXrVrluG/UqFG5jnv44Ycd+7O+LCxs4l4U2b+M3b59u9PfoTiJe1HeY4ryep40aZKjbYUKFXI8HtnfB1u0aOF43AEArsXkdEAJmD9/voYNG+b42cvLK8cs4Fmlw5K91HPAgAH59tWgQYMcJeWfffZZrjLPuLg4x3ZZly++9957ju3JkyerSpUqBbYfMWKEQkNDJdkn5Tt9+nSB7WvVquUoN75cixcv1v79+yXZy86nTJmSb2mpYRj65JNPHKXZe/fu1e+//+70HOPHj3f6GBTFmTNnHNvO1qkuqh07dmj58uWOnz/++GN5enrm2/6NN95w/G5xcXGaM2eO03M8+eSTCgkJKVJcQ4cOdbp83RdffKHY2FhJ0uOPP66OHTsW2L5Zs2aOWfVPnz6t3377rUgxFcfQoUPl7+8vqexXlSiL53bcuHGqVatWvvsfeughx/aGDRsKE3axdOnSxVGa/t133zmWfJOk5ORkfffdd5KkJk2aqHPnzqUWR/YJ4Erz+S7p9xjJfilH1uSV586d07Bhw5SRkaHPPvtMCxYskCR5e3trzpw58vb2LtFzAwCKh2vcgUJauHBhruvFY2NjtX79eu3evTvH/e+9957q1q3r+Dk8PNyxnf3DbX4efPBBvfjii8rMzNSxY8e0a9cuR/IrKUffn332mf7zn/8U+fcpjvT0dEcyGxgYWOhZhnv37q2dO3fKNE2tWrVKt956a75thw4dWmLXxma/Xrx///6qUaNGge1r166tm266ST///LMk+/N24403FnjMXXfddfmBZhMfH+/YzkoCS0r212Hr1q3Vpk2bAtv7+fnpnnvu0UcffeQ4/tFHHy3wmLvvvrvIcRXmmIULFzq2C7u0V58+fRxfmq1cuVK33XZbkWO71JYtWxQZGakDBw4oLi5OKSkpOfZnfTG0detWZWZmXta19UVRFs/tHXfcUeD+0NBQ+fj4KCkpSadPn1Z8fLwCAgIK+RsUzf33369XXnlF586d04IFCxx/hwsWLHB8wZO17ntxJSYma
u3atdq6datOnTql+Ph4ZWRkOPZHR0c7tv/666/LOldBSvo9RrLPizJ79my1atXKMSfJww8/rG+++cbR5u2339a1115b4ucGABQPiTtQSBs2bHA6ihQQEKAPP/xQDz74oOO+6OhonTx50vFz9iXj8lO1alWFhIRo586dkqRNmzblSNzvvPNOffHFF5LsifvGjRs1fPhw3XjjjbkmSSpJW7ZscSz35OHhodGjRxfquOyPm7Ml19q2bVv8AC+RfTK7wjzuktS1a1dH4p414VZ+GjZsqEqVKhU/wDwEBAQ4JjnMmiSrpBT38chK7pw9Hh4eHmrRokWR4yrMc75mzRrH9tSpUzVr1iynxxw5csSxXZil/goya9YsvfHGG4qKiipU+7S0NJ07d04VK1a8rPMWVmk/txUqVMjxhWFeDMNQxYoVlZSUJMk+kl+aifuECRNkmqZmz57tSG6zJoIzDKPYifuZM2c0fvx4zZ49O8cXaQXJaxLQklAa7zFZ6tSpo88//9wxAeaMGTMc+26++WY99dRTpXJeAEDxkLgDl8Hf31+VK1dWy5Ytdf311+uBBx7IVd586tQpx7aPj0+hS9sbNGjgSNwv/VB444036qmnnnJ86M7+pUL16tXVrVs39erVS4MHD1adOnWK++vlkn1N9tOnT+eYqb2wss+8n5eSLP3P/tjXr1+/UMdkX6/a2Yfx0rhMoVKlSo7HKGvksKSU9uNRsWLFYlVLOHscz58/nyOBmjZtWpHP4ex1lx/TNDVy5MgcSU1hxcfHl1niXtrP7aWrZOQn+yoAaWlphTqmOBo2bKhu3bppxYoVWrx4sU6cOCHJfnmMJHXv3j3H71dYBw8eVI8ePRwrCRRWYRP8oirtS6Fuu+02jRo1KsffVLVq1Yr1egcAlC6ucQcK6ZVXXpFpn9DRcYuPj9eBAwf0008/6emnn87zmuTso6Z+fn6FPl/2tnl9KJwyZYp++OEHdejQIcf9J06c0Pfff6+nnnpK9erV09ChQ4v8ITQ/586du+w+0tPTC9zv4+Nz2efIUpzH3tnjnl1Jxpole7Kxffv2Eu3bqo+Hs+PK4nWXn88//zxHEnPTTTdp1qxZ2rp1q86ePauUlJQc7wnZk+bMzMzLjruwSvu5Lc7ShqUtaw6D9PR0zZkzR3PmzHE8z1n7iuree+91vF8GBATo2Wef1W+//aZ9+/bp/PnzysjIcDzX2S9PKK3nujTeYy5VvXr1HD937tw5130AANdjxB0oZdmvU84qMy+M7G3zKzcdMmSIhgwZokOHDmnp0qVavXq1VqxY4Uj4TNPU999/79hX1EnDLpX9g37Lli21efPmy+qvtBXnsS/M416aunXrpiVLlkiSNm/erJSUFHl5eZVI3+Xx8ZByJ6Jnzpwps5Hsd99917E9ceJEjR8/vsD2pTXy6kx5fW4vxx133KGnnnpKSUlJmj17tmMSTx8fH6fX4+dl9erVWr16tST747l27Vo1b9483/aueq5L0ooVK/Tmm2/muG/BggX66quvdN9997koKgBAXhhxB0pZ9lLHpKSkQl8LeeDAAce2sxmF69WrpwceeECfffaZ/v77bx06dEgTJ06Ur6+vJHtZ+z//+c+iB3+J7KMwx48fv+z+Slv2x76wVQdFedxLQ58+fRzbKSkp+v7770us7/L4eEj22fWzf3lRVq+9w4cPOyaeDAoK0osvvlhg+7i4uGKX5F+u8vrcXo7AwEANGjRIkn1yuKwvEgcPHlysLyKyvjCT7CP2BSXtkr2svjw7d+6c7r//fseEe9nnUXniiSfK/e8HAFcaEneglNWuXVvVqlVz/Jw1olOQmJiYHJNgXXfddUU6Z926dTV+/HhNnTrVcd/ixYvznQG7sFq3bu1IoE6ePKk9e/YU6fiyln1m7cI87pe2K+rjXhK6d++eYybnDz74oMTKcMvj45El+yUhq1atKpNzZp/TITQ0NMf123lZuXJlrqUb81IaZefl+bm9HA888
ECh7iuM7M93YSZZzL78Xn6seIlBln/84x+O5Lx58+aKiIjIc4k4AIA1kLgDZSDrw5AkzZw502n7mTNnOpK1WrVqqWnTpsU6b/Zl19LS0nKsES4px/q8hZlIysfHJ8eI8KefflqsuMpK9lgXLlyYY3b/vBw9elS//vprnseXFcMwNHbsWMfPGzZs0Pvvv1/kftLT03Ncgyvl/H0iIyO1ZcuWAvtITEzMsTyUKx6PLNmXHvzPf/5TqAT5cmVfyi0xMdFp+8Iuy1jUv7vCKM/P7eXo169fjmUea9asqRtuuKFYfRXl+T569KhjvfOClMZzXRL+97//6euvv5YkeXp6as6cOfLz89Ps2bMdl6GsXLlSr7/+uivDBABkQ+IOlIHs6yP/+OOPWrRoUb5tDx48mOPD0qOPPppr1Kaw5fbZl8Cy2WyqXLlyjv1BQUGOD6unTp0q1AfLF154wbH90Ucf6Y8//ihULFLZl9f369dPDRs2lGQvO3/mmWfybWuapp566inHY9C4cWNdf/31ZRFmLsOGDdOAAQMcP7/wwgs5qiecOXTokK6//vpcS6aFhoaqR48ejp+ffPLJAp/zl19+2fFlR2BgYKHXTy8Njz76qGPyx02bNmnixImFPjYmJqZYI4cNGzZ0/O1t27ZN+/bty7ft3Llz9csvvxSq3+x/h9nXAr8c5fm5vRxubm5asWKFY2WN5cuXy83NrVh9NWrUyLH9008/5dsuIyNDjzzyiFJTU532WZz32NK2f/9+PfHEE46f33jjDbVq1UqSfYm47O81r776qtauXVvmMQIAciNxB8pA7969dfPNNzt+Hjp0qL777rtc7TZu3Kjrr7/esQxY3bp19fTTT+dq17lzZ91777369ddf8/3wGBUVlWNm5b59+8rT0zNHGy8vLzVp0kSSfTRo/vz5Tn+Xnj175pjNecCAAZo0aVK+a44nJydr/vz5GjRoUI4KgLJgs9lyTLz09ddf6+GHH84Va3x8vB588EH98MMPjvvefvvtHCNwZckwDM2ePdvx3GRkZOjRRx/VwIEDtWHDhnxHm7dt26ZnnnlGISEhWrZsWZ5tJk2a5EhsVqxYodtvvz1XJUJqaqpefPHFHCP9r7zySo4J0MpahQoVcsQzceJEDR8+PN/ruU3T1KpVq/T444+rXr16jrXFi6JKlSrq1KmTJPus4UOHDtWuXbtytMnMzNQnn3yi+++/X25ubjlGWPOT/VKIefPmlVj1QHl9bi9XcHCw2rVrp3bt2ik4OLjY/QwYMMDxRc3SpUv13HPP5XrdHD9+XLfffrvCwsIKNXt/cd5jS1NGRobuu+8+x8R6119/fa75T4YOHaoHH3xQkv09ftiwYVfERHwAUN4xqzxQRmbMmKGuXbtq7969On/+vO688041adJEHTt2lKenp7Zv365169Y5PsT7+fnp66+/znOJubS0NH399df6+uuv5ePjo5YtW6pRo0YKDAzU2bNntW/fPkVERDja+/j45JgdO7vbb79db7zxhiTpvvvu08yZMxUcHJzjet5Lj/3vf/+rY8eOafHixUpNTdW4ceP02muvqWPHjqpXr568vLwUGxurvXv3atu2bY5r69u2bXtZj2Fx3HnnnVq+fLljzflp06Zp7ty56t27t6pXr66TJ09qyZIlOZL5Z555RrfddluZx5pdpUqVtGbNGt1+++2OJPyXX37RL7/8oho1aqht27aqWrWqbDabjh8/rr///jvXZFJ5TdDVpUsXvfnmm3r++eclST///LPq1aun3r17q27dujp79qzCw8N1+vRpxzFDhgzRs88+W4q/beGMGDFC+/bt06uvvipJmj17tr766iu1bt1aoaGh8vf31/nz53XkyBH99ddfJbKM3Kuvvqp+/fopMzNTkZGRatGihbp27apGjRrp/PnzWrFihY4dOyZJev311zV16lSnk3rddtttGjdunEzTVFhYmFq2bKkuXbrkeL7uvvtutWvXrkixlufn1
gpCQ0N1//33a/bs2ZKkyZMna86cOWrfvr2qVaumAwcOaPny5UpNTVVAQIDeeecdPfbYY077Lc57bGl59dVXtWbNGkn2yo9Zs2bleR3+lClTtGLFCu3Zs0d79+7VU089VajLvAAApcgEkK+ePXuakkxJ5iuvvHLZ/R0/ftzs06ePo8/8bsHBweb69evz7efaa6912kfWrWHDhuaqVavy7Ss2NtYMDQ0tsI+8pKenm//3f/9n+vr6FioODw8P84knnsizr+yPc3h4eKEfz/r16zuO279/f4FtX331VdPLy6vAGL29vc033nijwH7Cw8Md7Xv27FnoWIsrNTXV/Oijj8yaNWsW+jlv3LixOX36dDMjIyPffqdNm2YGBgYW2I+bm5s5evRoMz09Pd9+9u/f72hfv379Qv9ezl5fBZk7d65Zq1atQj8eHTp0MJOTk3P1M2PGDEeb4cOH53u+//znP6a7u3u+/dtsNnP8+PFmZmZmoV+TL774YoExz5gxI0f7ovyNuPK5LcrfpDPZzy/JTEpKKnZfSUlJOfrKL7aEhASzX79+BT52derUMVeuXFno94KivscW9z3G2et51apVppubm6PNDz/8UGB/69aty/G6//bbbwsdCwCg5DHiDpSh6tWra8mSJfrtt980d+5crVy5UsePH1daWpqqVaumNm3aaPDgwRo2bFiBM1j/9ddfWrt2rcLDw7V+/Xrt2rVLR48eVWJionx9fVWjRg21bt1at956q+68884C1wGvUKGCNmzYoE8//VRhYWHasWOHYmNjnV6L6ebmpn//+9966qmnNHv2bP3xxx/avn27YmJilJaWpsDAQNWvX18tWrRQ79691b9//xxLVpW1l19+Wffff7+mTZumRYsWaf/+/YqNjVVQUJAaNWqkG2+8UaNGjVK9evVcFmNePDw89OSTT2rkyJH67bff9Pvvv2vt2rU6efKkTp8+LcMwVKlSJQUHB6tjx44aMGCAunfv7nQ265EjR2rQoEH6/PPP9euvvyoqKkpnzpxRQECA6tatq+uvv14PPfSQ0yWxXOHOO+/UoEGD9M0332jRokXasGGDTp06pfPnz8vPz0+1a9dWs2bN1L17d/Xv318hISGXdb7HHntMXbt21fvvv6/w8HAdPXpUPj4+ql27tvr06aOHHnoox6zuhfHGG2+oW7dumjFjhjZu3KgTJ04UagK8wijPz62r+fr66tdff9WcOXM0a9YsRUZGKi4uTlWqVFGjRo10++23a8SIEapYsaKWLl1aqD6L+x5bkuLi4nLMEj9q1CgNGTKkwGM6dOigCRMm6OWXX5Zkn2eiU6dOqlu3bqnHCwDIzTDNMpiaFwAAAAAAFAuT0wEAAAAAYGEk7gAAAAAAWBiJOwAAAAAAFkbiDgAAAACAhZG4AwAAAABgYSTuAAAAAABYGIk7AAAAAMDh5MmT+uWXXzR+/HjdfPPNqlKligzDkGEYGjFiRKmc8+uvv1a/fv1Uo0YNeXt7q379+ho2bJjWrFlT6D4SExP19ttvq3379qpUqZL8/PwUGhqqMWPG6ODBg6USd1lhHXcAAAAAgINhGPnuGz58uGbOnFli50pKStLQoUO1cOHCPPfbbDaNHz9er7zySoH97NmzR/3799fu3bvz3B8YGKivvvpKt9xyy2XH7AqMuAMAAAAA8lSvXj3169ev1Pp/6KGHHEl77969NX/+fK1fv17Tp09X48aNlZmZqQkTJmjq1Kn59hEfH68BAwY4kvaHH35YS5Ys0erVq/X666/L399fcXFxuuuuu/TXX3+V2u9SmhhxBwAAAAA4vPLKK2rfvr3at2+v6tWr68CBA2rYsKGkkh1x//PPP9W3b19J0sCBA/Xjjz/Kzc3NsT8mJkZt27bVoUOHFBQUpH379qlixYq5+hk/frxeffVVSdLbb7+t559/Psf+1atXq2fPnkpPT1fPnj21dOnSEom/LDHiDgAAAABwmDhxom655RZVr169VM/z7
rvvSpLc3d316aef5kjaJalKlSp66623JEmxsbGaNm1arj7S0tI0ZcoUSVKzZs00ZsyYXG26dOmikSNHSpKWLVumDRs2lOjvURZI3AEAAAAAZSo+Pl5LliyRJF1//fWqU6dOnu1uu+02BQYGSpJ+/PHHXPvDw8N17tw5SfZqAJst7xQ3+6R6efVjdSTuAAAAAIAytWHDBqWmpkqSevbsmW87T09PderUyXFMWlpajv0rV650bBfUT7t27eTr6ytJWrVqVbHjdhV3VwdQWpKTk7V161ZJUtWqVeXufsX+qgAAAIDlpKen69SpU5KkFi1ayNvb28URFV16erqOHz/u6jDydOzYsULlOfmNZLva9u3bHduhoaEFtg0NDdXixYuVnp6u3bt3q3nz5kXux93dXcHBwdqyZYt27NhxGZG7xhWbzW7dulUdOnRwdRgAAADAVW/9+vVq3769q8MosuPHj6tu3bquDuOyWHUu8iNHjji2nX25kP05OHz4cI7EPasfPz8/BQUFOe1ny5YtOnXqlFJSUuTl5VWMyF2DUnkAAAAAQJmKj493bPv7+xfY1s/Pz7F9/vz5PPtx1oezfqzuih1xr1q1qmN7slFXlYwr9lfFVW7Le0tcHQJQqq4JNlwdAlCqeuyc4uoQgFJxPDZePV6zzwKe/bN5efWeWz1VkpvzhqXsjDL0z4xDkuyVDDVr1nRxRMWTnJzs2Pb09CywbfaR8aSkpDz7cdaHs36s7orNZrNf61HJcFcVw8OF0QClJ7CSNa9bAkpKleok7riy1T4Z6OoQgFJ3Jcw3VUlu1sgpslW+16xZ07LXsDuTfc6DrEnq8pOSkuLY9vHxybMfZ30468fqyv9fEAAAAACUMsPDkGG4/stkwzSkDFdHcfkCAgIc287K1hMSEhzbl5bEZ/VTmNL3gvqxOq5xBwAAAACUqeyVAtknqsvL4cOHHduXThaY1U9CQoJiY2ML1U/VqlXL1cR0Eok7AAAAAKCMZZ8ZfufOnQW2zdrv7u6uJk2aFKuf9PR07d27V5LUrFmzIsfraiTuAAAAAOCE4WbI5u76m+Hm+nL9ktC+fXvHhHLLli3Lt11qaqrWrl3rOMbDI+c8A926dXNsF9RPRESEo1S+a9euxY7bVUjcAQAAAABlKiAgQH379pUk/fHHH/mWy//www+Ki4uTJA0ZMiTX/l69eqlChQqSpFmzZuW7bv3MmTMd23n1Y3Uk7gAAAACAEjVz5kwZhn1CvwkTJuTZ5rnnnpNkL2N/4oknlJGRc9a9mJgYvfDCC5KkoKAgjRo1Klcfnp6eevrppyVJO3bs0LvvvpurzZo1azR9+nRJUs+ePdW+ffti/16uwqzyAAAAAOCE4WGTYXP9uKeRWfoxrFy5Unv27HH8HBMT49jes2dPjtFrSRoxYkSxztOnTx/dfffd+uabb/TTTz/phhtu0DPPPKNatWpp69atev3113XokH3N+rfeeksVK1bMs5/nn39ec+fOVVRUlMaOHas9e/bo7rvvlo+Pj8LDw/XGG28oPT1dPj4++uCDD4oVq6uRuAMAAAAAHKZNm6ZZs2bluW/VqlVatWpVjvuKm7hL0hdffKG4uDgtXLhQ4eHhCg8Pz7HfZrPp//7v//TII4/k20dAQIDCwsLUv39/7d69W1OnTtXUqVNztAkMDNRXX32l1q1bFztWV3L9V0YAAAAAgKuSj4+PwsLC9NVXX+mGG25QtWrV5Onpqbp16+ree+/VypUr8y21zy44OFiRkZF666231K5dOwUFBcnX11dNmzbVs88+qy1btuiWW24p/V+olBhmflfvl3NHjhxxrPE3w9ZQVQwPJ0cA5VPEjG2uDgEoVW2aXRmz5wL5uWHra64OASgV0WfiFPLc+5Ls62dnX7e7vMieU3xVOURV3VyfU5zKSNN9p6Mkld/HFUXHiDsAAAAAABZG4g4AAAAAgIUxOR0AAAAAOGF4GDLcXH/5lmFzfQwoe4y4AwAAAABgY
STuAAAAAABYGKXyAAAAAOCEzc2Qzd31Zeo2uT4GlD1G3AEAAAAAsDASdwAAAAAALIxSeQAAAABwwvCQDAuUyhuuDwEuwIg7AAAAAAAWxog7AAAAADhhWGRyOsN0fQwoe4y4AwAAAABgYSTuAAAAAABYGKXyAAAAAOCE4WbIcHN9mbqR6foYUPYYcQcAAAAAwMJI3AEAAAAAsDBK5QEAAADACZvNkM0CpfK2DNfHgLLHiDsAAAAAABZG4g4AAAAAgIVRKg8AAAAAThg2Q4bN9WXqVogBZY8RdwAAAAAALIzEHQAAAAAAC6NUHgAAAACcMNxsMtxcP+5phRhQ9njWAQAAAACwMBJ3AAAAAAAsjFJ5AAAAAHDC5mbI5ub6Gd2tEAPKHiPuAAAAAABYGIk7AAAAAAAWRqk8AAAAADhhGIYMm+vL1A3D9TGg7DHiDgAAAACAhZG4AwAAAABgYZTKAwAAAIAThps1ZnQ33FwdAVyBEXcAAAAAACyMxB0AAAAAAAujVB4AAAAAnDDcDBmWKJV3fQwoe4y4AwAAAABgYYy4AwAAAIAThmGTYXP9uKdhuD4GlD2edQAAAAAALIzEHQAAAAAAC6NUHgAAAACcMGyGDJvrJ4azQgwoe4y4AwAAAABgYSTuAAAAAABYGKXyAAAAAOCEzc2QzQJrqFshBpQ9RtwBAAAAALAwEncAAAAAACyMUnkAAAAAcIJZ5eFKjLgDAAAAAGBhJO4AAAAAAFgYpfIAAAAA4IRh2GTYXD/uaRiujwFlj2cdAAAAAAALI3EHAAAAAMDCKJUHAAAAAGdsFpnRnaHXqxJPOwAAAAAAFkbiDgAAAACAhVEqDwAAAABO2NwM2dxcXypvhRhQ9hhxBwAAAADAwkjcAQAAAACwMErlAQAAAMAJw2ZYYlZ5K8SAsseIOwAAAAAAFkbiDgAAAACAhVEqDwAAAABOGIZNhs31456G4foYUPZ41gEAAAAAsDBG3AEAAADACSangysx4g4AAAAAgIWRuAMAAAAAYGGUygMAAACAE5TKw5UYcQcAAAAAwMIYcb8KxJrpilKyosxk7TaTtVvJilemJKmPEahnbTVK/JzLMuP0hxmnA0pRgjIVJDddY/hogBGkUMOnUH0km5kKM2O10ozXcaUpTaaqyF3tDX8NNIJUzfAo8bhxZYmNOah1iz/R7s0LFXf6iNw8vFSpWiM17zBUHa7/hzy8fC+r/79WzNaCz0cVqu2gh6epdfcHct2/9Id/a9n814p03p6DX1av28YX6RhcmU4eO6hfvvlYG1f9qpgTh+Xh6aUatRup6w13qP8d/5CX9+W9xg/v36EtG/7Unu0ROrhnm86dPaW42BjZbG4KqlRdwc3bqcdNd6tDj4EyDOcjQBnp6fp9wXQt++1rRR/YpeSk86pYpZZadeijW+56UvUaX3NZ8aL8OxQTq0//WKdFW3bryJk4eXm4qWHVSrqtfXM92qeDfL1K/n9/Ykqa2v/fpzoQEytJqle5gna880yebV+fv1Rv/LSsSP2Pu7WnXhrc6/KCBHDVI3G/Ctyfua/MzpViZurNzGOKUEKO+08pXUvNeC0343W3UVn32CoX2M9RM1UTM6N1VGk57o9WmqLNs1psntMYWw11MPxL/HfAlWFX5C/68bMRSkmKc9yXlpqoo/s36uj+jYpc9oXuHbNAlaoHuzDK4qlcM8TVIcAC1i//RR+MH67EhIuv8ZTkRO2J26g9Ozbq9/lf6P8+WKCadYv/Gp/3xSQt++3rPPedOLpfJ47u16o/vtM11/XQC299q8Cg/N/b42Jj9Orogdq9PSJnP9H7tPjHfQoP+58eef5D3TB4ZLHjRfm28K9dGvn5j4pLSnHcl5iaprMJR7XpwFHNWh6p75+5V42rVyrR8746P9yRtJeGJjUK/syD8oNSebgSiftVpqrcVUeeilRiqfQ/xTzhSNpbykcDbRVVSe46aKboO/OMjilNc8zTqpjppptsQXn2kWhm6
t/ZkvYbjQrqbgTIS4a2mImaZ55RojL1duYxvW2rq0aGd6n8Lii/jh2I1LxP7lN6apI8vf3V7ZaxatCsl9LTkrRt7bfatHS6Th/frTmTB+nhiWvl5RNw2ecc9nyYAirWzHd/YMU6ed7f/vrH1LzDbQX2nZmZoZmv91VKUpy8fAIV2nbQZcWK8m/frki9O+5epaYkydvXX7ePeEEt2vZUakqyViyeq9/nT9fRQ1F69ZlBmjx7rXz8ivcat7m5K+TaDgpt2UX1g69Vxco1FFixihLiYnXk4E4t+uFzHdr7t/7etFyv/3OwJk1bJpst91V4GRkZmvT8UEfS3qn3EPUbPFL+FSoqatt6fffFJJ07c1L/mfS4KlWtrbZdb7qsxwflz18Hj+mBz+YpKTVd/l6eem5AN/UIbaCktHTNW7dNM5Zv0u4Tp3X7B3O0YvzDCvDxKrHzfvL7Wnl7uMvDzab45NQC2z/cp70Gt2teYJuMzEzd+NZMxSWlKNDHSwOvCy2RWAFc3UjcrwJ3G5XUxPBWE3mrouGuE2aaRmXuL/HzbDYTtdyMlyR1kJ/G2WrJ7ULpZIjhrQ6mv57NPKhTStdMM0bdzAD5G265+vnBPKPoC0n7g0YV3Wa7+M16qOGjFqavXsw8rBSZ+jzzlCa51S3x3wXl229fjVF6apJsbu4a9vxC1W3SybGvYfPeqlQ9WH/MfVGnj+/Wml/fL5Gy88o1miioaoMiH+cXWE1+gdUKbLN782+OyoHmHW6Xh2fhLjfBlWva5H8qNSVJbm7umvDRQoW27OzY17J9b9Wq10SzpvxLRw9Faf5X7+ueR4r3Gn/y5alyc8/7o0Krjn110+2P6Z0X79Ha8B+1a+taRawIU4eeA3O1DQ+brR1/rZIk3Tz0MT36wkeOfSHXdFDbLjdpzP0dlZgQp2mTn1Xrjtfne15cmcZ+/ZuSUtPl7mbTT2OGqWPwxf/tvZo1VOPqlfTyd39o94nTmrJoTYmUnmdkZurJmT8rI9PUuFu7adaKSKeJe7VAP1UL9CuwzaItux1VA0PaNZePJ5f2oXw7ePCgpkyZorCwMB0+fFheXl5q3Lix7rzzTj3xxBPy9S3eZVkHDhxQw4YNi3RM/fr1deDAgVz39+rVS8uWFe4yFtM0i3ROq3DJ5HQzZ86UYRgyDCPPBx4l6z5bFXUw/FXRKN0PQT9mnpEkuUn6h62aI2nPUsFw0wijiiQpQZlabJ7L1Ue6aeoXM1aSVFeeGmxUzNWmmeGjG4wKkqRtSlKUmVyCvwXKu+i9G3Ro10pJUpseD+ZI2rN0uflZVallHwFZt/hjZaSn5WpjJVtWfenYbtV1mAsjgRVE/b1e2yPtr/HrBz2YI2nPMui+Z1WnYTNJ0i/ffKT0Yr7GnSXPbm5uGnL/Px0/b/9rZZ7t5n/5viQpoEIljRj9Vq79NesG6/YRL0iSjh3eo7VL5xcrXpRPEfuitSrqkCRpePc2OZL2LKNv7KLQmvbPEJ/+sU5p6RmXfd5Pfl+nyIPHFFKjsv7Zv9tl95dlzuotju17u7QqsX7hevZSeZsFbmVXKv/zzz+rZcuWeu+997Rr1y4lJibq7NmzioiI0NixY9WmTRvt2bOnzOJp2rRpmZ3LaphVHiUi0czUZiVJklrJV1XymTiusxEg3wsvuzXm+Vz7tyhRCdkmzrPlM9lRXyPQsb02j35w9dq5aYFju3WP4Xm2MWw2RwKcnBirAzuWlkVoxZKSFKedm36WJAVVbah6TUvuwyXKp3VLf3Js9x2Y92vcZrOpd3/7azwhPlZbI5aWWjw+vhfL8FNTc3+RGn0wSkf275Akdb1+aL4T5vW55eLkjWuXLsizDa5MP0fudGzf37V1nm1sNkP3XEiCYxOTtWzngcs656GYWL02P1yS9OEDt8jTPXcFYHHEJaUo7C/779OgSpC6htQrkX4BV4iMjNRdd92luLg4+fv76/XXX9fq1au1ZMkSPfzww5KkqKgoDRgwQPHx8
UXuv3bt2tq6davT27333us4ZvjwvP/vZWnXrp3T/sor6tBQInYrWemyl51ca+RfLuNhGGoqb0Uq0X6Maco9W3K+3UxybF9bwOzzTeQtLxlKkZnjGOBQ1GpJkoeXn2o1uC7fdvVDe1w8ZvdqNW5xQ6nHVhx/r/9e6an213jLrvcWauZuXNl2bLaXnHv7+KlxaNt8211zXXfH9s7Nq9WmU+m8xlcs/taxXad+7pGQrHjtMfXItT9LxSo1VKteiI4eitLOzatLNkhY2prd9tF2Py8PtWlQK9923ZvWd2yv3XNI11/buNjnfObLhUpISdM9nVuqR2iDYvdzqR83/K2k1HRJ0j1dWvKejXJt9OjRSkpKkru7uxYvXqzOnS9WePXp00dNmjTR2LFjFRUVpcmTJ2vChAlF6t/Dw0PXXnttgW0yMjK0dOlSSVJAQICGDBlSYHs/Pz+nfZZXJO4oEYfNizPA1jE8C2xbx/BUpJmoDElHlap6ujjBzGHz4rVldZR/P26GoZry0AGl6ogKvh4NV5eYo/aRjkrVG8vmlv9bXJVaFxOMrGMux4JpDyvmWJQS42Pk5ROoStUbq9E1fdSuz6MKrFS72P1uWUmZPHI6st/+eq1Rp3GBpex1GlycEOvwgct/jWcXFxujo4d26/cFX+jPn2dJkgKDqqjHzffmant43w7Hdu08Evvs6jRoqqOHohRz4rCSkxLk7VPwtcS4Muw6GiNJalStktzd8i8GDblQKp/9mOL4bt02LdqyWxX9vDXprn7F7icvlMlf2QybIZub67+MKYtS+fXr12vFihWSpJEjR+ZI2rOMGTNGM2bM0I4dO/Thhx/qpZdekodHyc7p8Mcff+jo0aOSpKFDh8rH5+qd56dMS+WXLl0qwzD04IMPOu5r2LCh43r3rFvWtyooP2KU7tiu4uT7oOz7sx8nSacv/OwtI8+J63L2Y39jOKcMpZmZRYoXV6b01GQlxts/zOU3i3sWH7+K8vCyJwVxp49c9rkP7Fim87HHlJmRpqTzpxW9d71W/PSmPnq+mSL+/LxYfcaeOqCDUfZrhus26aJK1Ys/uoQrQ2pKsuJi7a/xKtULfo37B1Z0JL6nTxy+7HO/9GhfDW7vocHtPfTADTX1r5E9tOSnmTJNU4FBVfSvd76Tf0BQruNOn4x2bDuLOWu/aZo6feLy/y5hfclp6Yo5b1/ppnbFwALbVvTzkd+FddyPnI0rsG1+ziYkaezXv0mS/n379arqZKK5ojgYE6tVuw9KkjoH11WjaiW7bB1QlubPn+/Yzp67ZWez2fTAA/bLnGJjYxUeHl7iccyePdux7axM/krHiDtKRJIuJs7eTr4Pyr4/+3GSlHjhZ2d9SJK3YehCdb6SZIo5W5GSfPH6Kk9v5x/GPL38lJaSoNSU4s+TULFaI4W2Hay6wR0VWNk+odLZk/u0I+JHbd/wg9LTkhU28wkZhqG2vUcVqe/Nq76SLsx82qrb/cWOEVeOpMSLr3FvH3+n7b18/JSclKCkxNKbC+SWu57UnaNeUmBQlTz3FyVmr2wj7ElJzF9yNYjPtma7v3fBFXuS5OvlqYSUNJ13Mvt7fl769nedjEtQx8Z19GDP/C+nKo6vV2/OesvWfV0ZbUf5tnKlfeDAz89Pbdvmf1lWz549HdurVq1Sv34lV8USHx/v+AKhQYMG6tEj/8utrgZlmri3b99eW7du1YIFC/Tyyy9LkhYtWqRatXJez1TUZQHgemm6uKyCswTaQxfLe1JNU9l+dPTjLuclQDn6Uabs89njapaednFiLDd35x8A3dztl2mk5TGhVmGEth2kVt3uz3UNY+1G7XRtpzsVFRmmuVPuVGZGmhZ99ZyatrlF/kE1Ct3/1tVzJEnuHt66puPQYsWIK0tqysXXqnshyhE9PLxyHVdcT4+fpuSkBJkylRB/Tnt3ROi376dq4Xef6nj0fj358n8VVLm6k5gL/rvMitd+HPOXXA1S0i9W3nm4Of8/7
nVhErnktKKvlLBy10HNXhkpdzebPnzglhK//vybNfZJr7w93HVb+2tKtG9Yg31W+aujVH7HDvtlTsHBwXIv4LKs0NCLl2VlHVNS5s2bp8REe0XO/ffn/ryVl507d6pjx47atWuXkpOTVaVKFbVt21a333677rnnnhIv5S9LZZq4Z00WEBER4bgvJCREDRo0KHJfR44UXEJ37NixIveJ4sueRDv7V5o9yfe85A8wq590OV9fMUc/LJBwxYs7E63kxLN57vP2rajASrXl7uHtuC8j3floTEa6faTHw9PbScu8eftWKHB/SJsB6jn4JYV/P0FpqYnatHyGetz6YqH6PrJnnU4f3y1JanrdQKfnQvl3+mS0zsfl/Rr3D6yoytVqy9Pr4ms1vRCJS1qa/TWe/bjiql4755fq17Tppptuf0xv/+tuRawM03PDO+vN6ctzlcPnjDm1wFiy4rUfd/Vex3g18cqWEKRlOF/iLeXCMnDeRfzwnZKWrqdm/SzTlB6/vqNa1M39JdPlWL/3iHafOC1JuqVNU1Xwvfy/OaAwCpPz1KlT8GVKl0pOTlZMTEyhjq1YsaL8/PyUkJCgw4cv/7Ks7LKXyWeV5Dtz4sQJnThxwvFzdHS0oqOj9dNPP+mtt97SvHnz1KxZsxKNs6yU21L5unVzr/EJ1/HJljgnq+DrzbPv97kk4c5aKs5ZH5KUbF5M3H0KMUKP8u3PeeO1eeX/8tzXqtv9GvzIdHl5Z1uWKjnBaZ+pKfY2nl7OS46Lq23vUQr/YaJkmjq4c4VUyMR9c/ZJ6boxKd3V4MtP/0/hYXm/xnsPuF+jJ3yRY+m15EKUkqck2V/jPr6l8xr39PLW069M08MDGyvmxGHN+uhFjXkt5+9wacwFJe5Z8UqSTyEuBUD5F+BzscqiMOXviSn2NoUpq8/u7V9WKOr4adWpFKiXB/cq0rGFMWf1Zsf2PUxKhzLUoUMHp21M0/mAWHbZl3bz93f+XpyVuJ8/X3KXOB06dEjLli2TJHXp0kXBwcEFtrfZbOrbt6/69++vVq1aqXLlyoqPj9emTZv03//+Vzt27ND27dvVu3dvrV+/XvXqlb+lGstt4g5ruXTCuSYFtC1oIrvKF35OlqnzZkaBE9TFXBjbryA3eRiMuENy9/SWj39lJZ0/rbizBVflJCWcVdqFxD2wctG+iS4Kv8Bq8vWvrMT4GMWfjXZ+gOzVAn+v+85+fIXqatyiZGc9Rvnl6eWtgAqVFX/utGKcTN52Pu6ski8kwpWrl96X3YFBVRTaqos2r/tD65f9pPT0NLm7XxwNrVzt4qoKMSeO5HstfNZ+STIMQ5WdTGSHK4O3h7sq+/vo9PkkRTuZcO5sQpISUuz/++s4mcjuUu/9al+WsHfzRlr4V1SebRIv9J2Ykqbv1m2TJFUN9FOvZgVfwpmanqHv1/8tSaoW6KcbLmOZOlibYbPJsLn+M2dpx5CcfPESJ09P51+SeXnZv4BLSiq5S5y+/PJLxxcOhRlt/+GHHxQUFJTr/u7du+vxxx/Xww8/rFmzZunEiRN65pln9MMPP5RYrGWl3Cbuzkoxjh07VqhvoFAy6hpejonijpipKmgA/MiFJd/cJNW6ZMm3uobnxX6UqlDlXSqZYZo6fiFxL2jZOFw5Bj8yXYMfme60XdXazXRo10qdObFXmRnp+S4JF3N0l2O7Sq3QPNuUnKJVhERFhikp4YwkqUXne2SzMX/D1WD0hC80esIXTtvVbdRM2yNX6viRvcpIT893Sbgj2ZaAq9ugdF/jFS4k4ynJiYqLjVGlKjVzxJsl+uAuNWraOt9+jhyw/11WqV6XpeCuIqG1qmpV1CHtO3lG6RmZ+S4JF3Xs4hJwTWvl/wVQXlIvlNj/b+Vf+t/KvwpsG3M+USP++70k+9rxzhL3XzdH6UyCPWG5q1MLuVkgscPVY/369apZs6bzhkXg7X2xMio11XklTEqK/TKnklyq7X//s1dveXl56a677nLaP
q+kPYuHh4emTZumtWvXateuXfrxxx8VHR2t2rWLv1yvK5Tbd5Y6deoUeCvpFzAK1kTejgnltpmJ+bZLM03tUvLFYy65xr25cfEPfpuZ/7d2u5Ws5AsZfvZjgHohXSRJaSkJOnpgU77tDu5cfvGYJl1KLZ6EuFNKPG//sBkQVMtJa7vNq7KXyd9XKnGh/GrWqqskKTkpQXt3bsy33d+bVji2Q1uV3mtckk6fOurYvrTEPStee0zLlZ+zMcd19JB9JLS044W1dG5iL1lNSElT5IGj+bZbseugY7tTsHXKXLOXybN2O8pazZo1neZFRRUQcPESp8KUvyck2Ku7ClNWXxjr16/Xzp32L59vvfXWApPywnJ3d9fIkSMdP2eV4Zcn5XbEHdbia9jUSj7aqERtVqJizDRVMXJPHLPGjHcs+dbZyP3H3UK+8pNNCcrUn2acbjcr5jmD5BLzYjldpzz6wdUr9LpBWvnz25Kkv5bPUp3GuStvzMxMR3Ls7RukBs16lVo8G5dOcyzpVj+0u9P2ifGntXuzfY3h6vVaqkY9PgQip469btX3M9+SJC35eZZCru2Yq01mZqbCF9pf434BQWrRrlepxRNz4oh2bV0rSapas758/AJy7K9dP0R1GjbTkf07tOqPeXrwmXfk5e2bq58/f7k4CVGnXoNKLV5Yz8A2oXo3zL701P9W/aX2jXMnGpmZpr6+kCAH+XqrZ2iDIp0j4YtXnLZp9vwHOnT6nOpVrqAd7zxTqH5Pn0/Uoi32iURb1K2ulvUKv3IIyp+rZVZ5b29vVa5cWadPn3Y6IfjZs2cdiXtJzUFWnEnpCqN58+aO7ejowl2+aCXldsQdZeuPzHMamBGlgRlRmpMZk2ebIbZKkqQMSZ9lnlTGJRNhnDMzNNO0H+snm/oZuWfJ9jAM3WIESZIOK1U/mrlnWN5pJul385wk6Vr5KMRg5lZcVLtxe9Vr2k2SFLl8hg7vXpurzepf31fMUfs3uR37PSk399xfMh3YsUwTH/DUxAc8NX/qyFz7Y08d0LEDkQXGEhUZpuXzX5ckuXv6qHX34U7j37Z2rjIz7JeBtOrKpHTILeSaDmrexv4a/2PBDO3csiZXmwVfva8j++3L8txy91M5rjnPsnXjMg1u76HB7T304YSHcu2PPhilLRvCC4wl4fw5vfd/9ys9zV5K2bt/3q/ZwcOelSTFnzujWVP+lWv/sSN7HV9G1KwbrE69Bhd4XlxZ2jWqra4h9hH0WSsitW5P7sshP1y0WjsvlMo/fn1HebjnvIRo+c4D8ntoovwemqhHps8v9ZizfLdum9Iy7AMSjLbjSpKV5O7Zs0fp2ZZtvFTWyLikEpmtPS0tTd98840kqVq1arrpppsuu88sJb0EZFlzyYh7eX/Qypu/zSQdMy9enxKni8utHDNT9UfmuRztr7cVb9mpVoavehgBWm7Ga50SND7ziG61VVQlueugmaJvzTM6dWFiuhFGlXwnnrvNqKSVZryilaYZZoyOZqaphxEgTxnaaibpO/O0MiR5ytDDtqrFihVXtpvum6wvXuul9NQkfflOf3Ub+IIaNuultNQkbVv3rTaFT5MkVa7RRJ1vfrZY54iNOahZk25QneBOCmkzQDXqtZRfQDVJ0tlT+7R9ww/avuEHx2h7v7vfUmAl59dSZVUC2Nzc1aLLPcWKDVe+UWPe079G9lRqSpImPNVfQ0f8Sy3a9VRqSrJWLJ6rxT/aX+O16oVo8H3Fe42fiTmm8Y/3U4MmLdWx1yA1Dr1OFStXl5u7u87GnNDOLav1x4IZOnv6uCSpXuNrdPuIsXn21XvAA1ry00zt2LxaC7/7j86ePqF+g0fKLzBIu//eoG+nv6HEhDjZbDaNGvN+vtft48r19j036fpJXygpNV23Tv5Sz93STT1DGyopNU3z1m/TF8vslz41qV5ZT9/Y2cXRXpRVJu/uZtNdnVq4OBqg5HTr1k0rVqxQQkKCNm7cqI4dc1d3STlLz
rt27Zpnm6IICwvT6dP2pRXvvffeAteQL6rt27c7tmvVKtzli1bikv+M2Sc8yJrMAKVnsXlOf5p5z9S6Q8naYSbnuO96FX+96KeN6ko0MxWhBG1RkrZk5rxO3SbpLqOSbrIF5duHr2HTeFttTcyM1lGlaZF5TovMnF8u+MqmMbYaasRoO/JQs0EbDX3iK/342QilJMXpz+/+L1ebyjWa6N4xC+TlE5BHD4V3ZM9aHdmTe1Q/i4enr26871217T3KaV8xR3fq6L4ISVLja2+Qf4WSXWcYV45GTdvouTfm6IPxw5WYEKcvP305V5ta9UL0fx8syFW6XlQHdm/Rgd1bCmzTrlt/PTV+Wp4l8JLk5uamF9/9Xq+OHqjd2yO05s8ftObPnDP6enh66ZHnP1TbriU3uoLyo3X9mpr92FCN/PxHxSWlaML3f+Zq06R6ZX3/zL05lpBzpV3HYrRxv/2a/L7XNFb1Cly6d6UzDIuUypfBIOjgwYM1adIkSdKMGTPyTNwzMzMdZe1BQUHq3bv3ZZ83e5n88OHOKxULKz09XV98cXEC2B49epRY32XFJYl79onj9u7dq6ZNm7oiDJQCL8OmV9xqa2lmnJaYcTqgFJ1XpoLkpmsMH91iBCm0EJPJ1TI89aGtvsLMWK0043VMaUqXqSpyVzvDT7caFVUtj2vogSxN29yix17fqHWLPtbuzQsVdyZabu6eqlS9sZp3uF0drn9cHl55JxmFUbPBdRry2Ewd2bNOR/dv1PnY40qMj1FmZrq8/SqqWu3mati8t67r9ZD8AqsVqs/Nq75ybLfsyqR0KFiHHrfog6836ZdvPlLEyl91+uQRuXt4qmadxupy/VANuPPxfBPpwmjWqote+WihNq9for07Nur0yWjFnj6hlORE+foHqlqtBmp6bUd1v/GuHBPQ5ScwqIrenL5Ci+dP0/JF3+jI/p1KSU5QxSq11LJ9bw28+ynVa3xNseNF+de/dVOtm/iYPvljnRZt3q3os3HydHdTo2qVNKR9cz3Wp4N8vazzv//rHJPStXRhJEDJ69Chg7p3764VK1Zo+vTpGj58uDp3zlntMnnyZO3YYb8sa/To0fLwyPn3uXTpUkcyP3z4cM2cObPAc545c0ZhYWGSpBYtWqh169aFijU8PFxt2rTJdxK7tLQ0Pfzww45YBw4cWGLX45clwzQvuRC5DMTHx6tatWpKTk7WddddpzfffFP169eX7cLyGbVr177s5QSOHDnieEJm2BrmOVEacCWImLHN1SEApapNM9ePbgCl6Yatr7k6BKBURJ+JU8hz70uyL+VcnBnOXS17TrH2vptU09/1qxkdO5+kTl/ZJ7Itzcc1MjJSXbt2VVJSkvz9/TVu3Dj17t1bSUlJ+uabbzR16lRJUkhIiCIiInLMRi8VPXH/9NNP9cQTT0iS3n33XY0ZM6ZQcY4YMULff/+9br31VvXq1UtNmzZVYGCgzp8/r40bN2rq1KmOMvlq1app7dq1atiw4GUercglI+4BAQF6+umn9fbbb2vTpk3q169fjv3h4eHq1auXK0IDAAAAgKtemzZtNHfuXA0bNkxxcXEaN25crjYhISEKCwvLlbQXR1aZvJubm+67r2iVh+fPn9ecOXM0Z86cfNu0aNFC33zzTblM2iUXLgf35ptvqkmTJpo9e7b+/vtvnTt3ThkZGc4PBAAAAACUuoEDB2rLli368MMPFRYWpiNHjsjT01PBwcG644479OSTT8rXt/iXZWXZvXu31q1bJ0m64YYbVKNG4ZdWfOGFF9S6dWutWbNG27dv16lTp3TmzBl5eXmpevXqateunYYOHaohQ4bIzS3vybHLA5eUypcFSuVxtaBUHlc6SuVxpaNUHleqK61Uft39N6mm/+UnqZfr2PlEdfxf6ZfKw1pYxx0AAAAAAAsjcQcAAAAAwMJcdo07AAAAAJQXhs0mw+b6cU8rxICyx7MOAAAAAICFkbgDAAAAAGBhlMoDAAAAg
DOGYb+5mhViQJljxB0AAAAAAAsjcQcAAAAAwMIolQcAAAAAZwxDhs0CZeqUyl+VGHEHAAAAAMDCSNwBAAAAALAwSuUBAAAAwAnDZpNhc/24pxViQNnjWQcAAAAAwMJI3AEAAAAAsDBK5QEAAADACcNmjVnlrRADyh4j7gAAAAAAWBiJOwAAAAAAFkapPAAAAAA4wazycCWedQAAAAAALIzEHQAAAAAAC6NUHgAAAACcMGzWmNHdYOj1qsTTDgAAAACAhTHiDgAAAABOGIZF1nE3XB8Dyh4j7gAAAAAAWBiJOwAAAAAAFkapPAAAAAA4Y7PZb65mhRhQ5njWAQAAAACwMBJ3AAAAAAAsjFJ5AAAAAHDCMAxLzOhuhRhQ9hhxBwAAAADAwkjcAQAAAACwMErlAQAAAMAJw2aTYYEZ3a0QA8oezzoAAAAAABZG4g4AAAAAgIVRKg8AAAAAThg2Q4bN9TO6WyEGlD1G3AEAAAAAsDASdwAAAAAALIxSeQAAAABwxjAkK8zoblAqfzWywCsPAAAAAADkh8QdAAAAAAALo1QeAAAAAJyxyKzyskIMKHOMuAMAAAAAYGEk7gAAAAAAWBil8gAAAADghGHYZBiuH/e0QgwoezzrAAAAAABYGIk7AAAAAAAWRqk8AAAAADhjM6wxo7sVYkCZY8QdAAAAAAALY8QdAAAAAJwwbDYZNtePe1ohBpQ9nnUAAAAAACyMxB0AAAAAAAujVB4AAAAAnDAMQ4YFJoYzDNfHgLLHiDsAAAAAABZG4g4AAAAAgIVRKg8AAAAAzhiGZFhg3JNS+auSBV55AAAAAAAgPyTuAAAAAABYGKXyAAAAAOCEYbPIrPIWiAFljxF3AAAAAAAsjMQdAAAAAAALo1QeAAAAAJyx2ew3V7NCDChzPOsAAAAAAFgYiTsAAAAAABZGqTwAAAAAOGEYhgzD9TO6WyEGlD1G3AEAAAAAsDASdwAAAAAALIxSeQAAAABwxrDIrPKGBWJAmeNZBwAAAADAwkjcAQAAAACwMErlAQAAAMAJw2bIsLl+RncrxICyx4g7AAAAACBPBw8e1JgxYxQaGio/Pz9VqlRJ7du31zvvvKPExMTL6nvmzJmOZfac3WbOnOm0v8TERL399ttq3769KlWqJD8/P4WGhmrMmDE6ePDgZcXqaoy4AwAAAIAzhmGNieHKcB33n3/+WcOGDVNcXJzjvsTEREVERCgiIkLTpk1TWFiYgoODyyym/OzZs0f9+/fX7t27c9y/a9cu7dq1S9OmTdNXX32lW265xUURXh4SdwAAAABADpGRkbrrrruUlJQkf39/vfjii+rdu7eSkpL0zTff6PPPP1dUVJQGDBigiIgIBQQEXNb5Fi1apFq1auW7v06dOvnui4+P14ABAxxJ+8MPP6y7775bPj4+Cg8P16RJkxQXF6e77rpLq1atUuvWrS8rVlcgcQcAAAAA5DB69GglJSXJ3d1dixcvVufOnR37+vTpoyZNmmjs2LGKiorS5MmTNWHChMs6X0hIiBo0aFCsY9955x1FRUVJkt5++209//zzjn2dO3dWr1691LNnTyUmJuqZZ57R0qVLLytWV7BArQcAAAAAWJzNsM6tlK1fv14rVqyQJI0cOTJH0p5lzJgxatasmSTpww8/VFpaWqnHlZe0tDRNmTJFktSsWTONGTMmV5suXbpo5MiRkqRly5Zpw4YNZRpjSSBxBwAAAAA4zJ8/37H94IMP5tnGZrPpgQcekCTFxsYqPDy8LELLJTw8XOfOnZMkDR8+XDZb3inuiBEjHNs//vhjWYRWokjcAQAAAAAOK1eulCT5+fmpbdu2+bbr2bOnY3vVqlWlHldesmKVcsZzqXbt2snX11eS62K9HCTuAAAAAOCEYdgscyttO3bskCQFBwfL3T3/adFCQ0NzHVNcDz74oGrVqiVPT09VqVJFnTp10ssvv6zo6OgCj9u+fXue8VzK3d3dMfv95cbqCkxOBwAAA
ADl0LFjx5y2KWg29rwkJycrJiamUMdWrFhRfn5+SkhI0OHDh4t0nktlnzDu9OnTOn36tNatW6fJkyfrgw8+0KOPPprncUeOHJFkrw4ICgoq8Bx169bVli1bdOrUKaWkpMjLy+uyYi5LJO4AAAAAUA516NDBaRvTNIvUZ3x8vGPb39/fafusxP38+fNFOk+WRo0a6bbbblPnzp1Vt25dSdK+ffv0/fffa968eUpOTtZjjz0mwzD0yCOP5BtvYWPNcv78eRJ3q9ny3hIFViraN01AedHuwWtdHQJQqnovfd3VIQClalvn0a4OASgVJ45HS3rf1WGUnDKa0b1QcZSi5ORkx7anp6fT9lnJb1JSUpHPNWTIEA0fPlyGkfN3at++ve666y798ssvuu2225SWlqZnn31Wt956q2rUqJFnvEWJtbjxuhLXuAMAAABAObR+/XodPny4wFtReXt7O7ZTU1Odtk9JSZEk+fj4FPlcFSpUyJW0Z3fLLbdo/PjxkqTExERNnz49V5useIsSq1S8eF2JxB0AAAAAyqGaNWuqTp06Bd6KKiAgwLFdmPL3hIQESYUrVS+ORx55xJHcL1u2LNf+rHiLEqtUevGWFhJ3AAAAAHDCMGwybBa4lfKs8t7e3qpcubKkixO/5efs2bOOZDjr+vSSVq1aNUc8ec0wn/XlREJCgmJjYwvsK6sCoWrVquXq+naJxB0AAAAAkE3z5s0lSXv27FF6enq+7Xbu3OnYbtasWanFU1A5fVasl8ZzqfT0dO3du1dS6cZaWkjcAQAAAAAO3bp1k2Qfxd64cWO+7bKXrnft2rVUYjl16pRjebpatWrl2p8V66XxXCoiIsJRHVBasZYmEncAAAAAcMYwrHMrZYMHD3Zsz5gxI882mZmZmj17tiQpKChIvXv3LpVYpk6d6ljSrmfPnrn29+rVSxUqVJAkzZo1K9/l72bOnOnYHjJkSMkHWspI3AEAAAAADh06dFD37t0lSdOnT9eaNWtytZk8ebJ27NghSRo9erQ8PDxy7F+6dKkMw5BhGBoxYkSu4w8cOKDIyMgC4/jll1/073//W5J9FvgHH3wwVxtPT089/fTTkqQdO3bo3XffzdVmzZo1jhnpe/bsqfbt2xd4Xiu6KtZxBwAAAAAU3ocffqiuXbsqKSlJ/fr107hx49S7d28lJSXpm2++0dSpUyVJISEhGjNmTJH7P3DggHr37q3OnTtr4MCBatWqlapVqyZJ2rdvn+bNm6d58+Y5RtDfffdd1a5dO8++nn/+ec2dO1dRUVEaO3as9uzZo7vvvls+Pj4KDw/XG2+8ofT0dPn4+OiDDz4o3gPiYiTuAAAAAOCMzZBsFihYtpV+qbwktWnTRnPnztWwYcMUFxencePG5WoTEhKisLCwHEvIFdWaNWvyHNHP4uvrq/fff1+PPPJIvm0CAgIUFham/v37a/fu3Zo6darji4UsgYGB+uqrr9S6detix+pKJO4AAAAAgFwGDhyoLVu26MMPP1RYWJiOHDkiT09PBQcH64477tCTTz4pX1/fYvXdtm1bffnll1qzZo0iIiJ07NgxxcTEKD09XRUrVtQ111yjvn37atSoUY6R+IIEBwcrMjJSn3zyib777jvt2bNHqampqlu3rvr376/Ro0erfv36xYrVCkjcAQAAAAB5ql+/vt577z299957RTquV69e+U4UJ9lHye+77z7dd999lxuig5+fn8aOHauxY8eWWJ9WQeIOAAAAAM6U0YzuhYoDVx0LXKQBAAAAAADyQ+IOAAAAAICFUSoPAAAAAM7YbDIsMau8BWJAmeNZBwAAAADAwhhxBwAAAABnDJv95mpWiAFljmcdAAAAAAALI3EHAAAAAMDCKJUHAAAAAGcMQ7JZYA111nG/KjHiDgAAAACAhZG4AwAAAABgYZTKAwAAAIAThmGTYYEZ3a0QA8oezzoAAAAAABZG4g4AAAAAgIVRKg8AAAAAzthkjVnlGXq9KvG0A
wAAAABgYSTuAAAAAABYGKXyAAAAAOCMYbPfXM0KMaDM8awDAAAAAGBhJO4AAAAAAFgYpfIAAAAA4Ixh2G+uZoUYUOYYcQcAAAAAwMJI3AEAAAAAsDBK5QEAAADAGZvNfnM1K8SAMsezDgAAAACAhZG4AwAAAABgYZTKAwAAAIAzhs1+czUrxIAyx7MOAAAAAICFkbgDAAAAAGBhlMoDAAAAgDOGIdkMV0dhjwNXHUbcAQAAAACwMEbcAQAAAMAZw7DGxHCMuF+VLPDKAwAAAAAA+SFxBwAAAADAwiiVBwAAAABnDMMaZepWiAFljhF3AAAAAAAsjMQdAAAAAAALo1QeAAAAAJyx2ew3V7NCDChzPOsAAAAAAFgYiTsAAAAAABZGqTwAAAAAOMOs8nAhRtwBAAAAALAwEncAAAAAACyMUnkAAAAAcMaw2W+uZoUYUOZ41gEAAAAAsDASdwAAAAAALIxSeQAAAABwxjAkmwXGPZlV/qpkgVceAAAAAADID4k7AAAAAAAWRqk8AAAAADhjGNYoU7dCDChzjLgDAAAAAGBhJO4AAAAAAFgYpfIAAAAA4Ixhs99czQoxoMzxrAMAAAAAYGEk7gAAAAAAWBil8gAAAADgDLPKw4UYcQcAAAAAwMIYcQcAAAAAZ2w2+83VrBADyhzPOgAAAAAAFkbiDgAAAACAhVEqDwAAAABOmIYh0wITw1khBpQ9RtwBAAAAALAwEncAAAAAACyMUnkAAAAAcMYwJMMC456Uyl+VLPDKAwAAAAAA+SFxBwAAAADAwiiVBwAAAABnDJtFSuUtEAPKHM86AAAAAAAWxoj7VSw25qDWLf5EuzcvVNzpI3Lz8FKlao3UvMNQdbj+H/Lw8r2s/v9aMVsLPh9VqLaDHp6m1t0fyHX/0h/+rWXzXyvSeXsOflm9bhtfpGNQvsWa6YpSsqLMZO02k7VbyYpXpiSpjxGoZ201SvycyzLj9IcZpwNKUYIyFSQ3XWP4aIARpFDDp1B9JJuZCjNjtdKM13GlKU2mqshd7Q1/DTSCVM3wKPG4Ub4dOn5K/5m3UL+t2aTok6fl5eGuhrVr6LbenfXIbTfJ19ur2H0nJqfo93V/6c8NmxW5a5/2HTmu80nJCvTzUXDdmurbobVGDbpB1StXLHSfCUnJ+vLXpfpp2TpFHYrW6XPxquDvp1pVKqlTi6bq37Wd+nZoVeyYceU7Fn1Y3375X61a9rtOHI+Wp6enatdtqL43DdId946St8/lfVbJzMzUgX1R2r51k7Zv2aTt2yK1Z9ffSktLlSR9Ousnte3QzWk/K5cu1o5tm7R9a6SOHjmgs2dO6/z5OPn6+qlWnQZq26GrBt85XPUbNrmseAFcvUjcr1K7In/Rj5+NUEpSnOO+tNREHd2/UUf3b1Tksi9075gFqlQ92IVRFk/lmiGuDgFl7P7MfWV2rhQzU29mHlOEEnLcf0rpWmrGa7kZr7uNyrrHVrnAfo6aqZqYGa2jSstxf7TSFG2e1WLznMbYaqiD4V/ivwPKp4WrIjTq1SmKS0hy3JeYnKKzO/dq0869mvXLEs17+0U1rlOzyH1v23NQ1z/+ss4nJefadybuvNb/vVvr/96tT779RVOef1RD+3Z12ueyTdv0j0mf6tDxUznuP3X2nE6dPafNu/dr9ZYdJO7I14rw3/TK2EeVcD7ecV9yUqLizkVqx7ZI/TTvS7332TeqW79Rsc/x609z9e8Xn7isONPT0zXmH3fnuS8+7px2bd+sXds369uvPtcjT72o4Q8/c1nngysZMi0xo7sVYkBZI3G/Ch07EKl5n9yn9NQkeXr7q9stY9WgWS+lpyVp29pvtWnpdJ0+vltzJg/SwxPXyssn4LLPOez5MAVUzP/DZGDFOnne3/76x9S8w20F9p2ZmaGZr/dVSlKcvHwCFdp20GXFivKtqtxVR56KVGKp9D/FPOFI2lvKRwNtFVVJ7jpopug784yOKU1zzNOqmOmmm2xBe
faRaGbq39mS9huNCupuBMhLhraYiZpnnlGiMvV25jG9baurRoZ3qfwuKD82R+3X8FfeV1JKqvx9vDVm2BD1uO4aJaWkat6SVZr58xLtPnxMQ8dO0vJpbynAt3BVH1niEhMdSXvnFk11U5e2ui60sSoFBigmNk4Llq/TzJ//UFxCkka+OkWBfr7q16lNvv2FR2zRHS+8qeTUNAX5+2nkoBvUvc01qlqxghKTU7TrYLR+W71RJ8/GXs7DgivYru1b9NI/RyolOUm+vv4a/sgzatuhm1JSkrV44Q9a8N1sHTqwR/987G7NnLdEfn7F+6ximqZj293DQ42bNFdGepr2RG0vUj/+AYG6rkM3XdOyrWrXqa8qVWvI28dHp04e16b1K/XzD1/pfHycPn3v3woIqKDb7n6wWPECuHqRuF+FfvtqjNJTk2Rzc9ew5xeqbpNOjn0Nm/dWperB+mPuizp9fLfW/Pp+iZSdV67RREFVGxT5OL/AavILrFZgm92bf3NUDjTvcLs8PIv2gRXl391GJTUxvNVE3qpouOuEmaZRmftL/DybzUQtN+0jPx3kp3G2WnK78M17iOGtDqa/ns08qFNK10wzRt3MAPkbbrn6+cE8o+gLSfuDRhXdZqvk2Bdq+KiF6asXMw8rRaY+zzylSW51S/x3Qfny/JQZSkpJlbubmxa897I6XtvUsa9X2xYKrlNTL//nS+0+fExTvvlZLz10Z5H6txk23dans14ccYeaNcz9euvboZX6dWyje156RxkZmXrug+na/PVHMvIYeTp19pyGT/hAyalpatmkgX589yVVrxSUo03nlqEaMbCvUtPSch0PSNJ7b7yolOQkubm7a8q0eWrRpoNjX7tOPVS3fiN9/O4EHTqwR3NmfKKHn/xXsc7TsHFTjXnpTTW7to1CmrWQl5e3Pv/4zSIl7u7u7lq8Zq/c3HK/3zeT1KPPzbpz2CMaPrS34s7FaupHkzTojgfybA9Y0cGDBzVlyhSFhYXp8OHD8vLyUuPGjXXnnXfqiSeekK9v8S9ZSUxM1G+//abff/9dERER2rNnj86fP6/AwECFhIToxhtv1GOPPaYaNQq+7LFXr15atmxZoc6Z/Qu78oTJ6a4y0Xs36NCulZKkNj0ezJG0Z+ly87OqUitUkrRu8cfKSLf2B6stq750bLfqOsyFkcBV7rNVUQfDXxWN0v0u8sfMM5IkN0n/sFVzJO1ZKhhuGmFUkSQlKFOLzXO5+kg3Tf1ixkqS6spTg43c1ws3M3x0g1FBkrRNSYoyc5cv4+oRsX23Vm/eIUl64JY+OZL2LE/fPVBN69eWJP3nu4VKS08v0jk6tWiq2RP/mWfSnuWW7u11aw978rQv+oQ2R+X95diE/87RmXPx8vX20jdvjM2VtGfn6cE8Dsjt7y0b9dfGNZKkW28fliNpz3Lfg0+qQWP7pXFz//dfpRfzS6BrWrbVncMeUYvW7eXlVfzqJmdJeK069dX3psGSpLNnYnRwX1SxzwUXyppV3gq3MvLzzz+rZcuWeu+997Rr1y4lJibq7NmzioiI0NixY9WmTRvt2bOnWH1v2bJF1atX1+23367PPvtMERERio2NVXp6us6cOaO1a9dq4sSJatq0qebOnVvCv1n5Q+J+ldm5aYFju3WP4Xm2MWw2RwKcnBirAzuWlkVoxZKSFKedm36WJAVVbah6TZ1PIAMUR6KZqc2yX1vcSr6qks/EcZ2NAPleeGtdY57PtX+LEpWQbeI8Wz7XyvU1Ah3ba/PoB1ePX1ZscGzff3PvPNvYbDbde1NPSVLs+QQt3/R3qcTS47prHdv7jp7Itf9s/Hl9+4f9y+G7+nVXvRpVSyUOXNmWLVno2L5lyL15trHZbOo/yH5deXzcOUWsX1EmsV0OX7+Lc5akpKa4MBKgcCIjI3XXXXcpLi5O/v7+ev3117V69WotWbJEDz/8sCQpKipKAwYMUHx8vJPecouLi9P58/bPOF27dtWkSZP0+
++/a9OmTVq0aJEeffRR2Ww2xcXF6b777tOvv/7qtM927dpp69atBd7KK0rlrzKHolZLkjy8/FSrwXX5tqsf2uPiMbtXq3GLG0o9tuL4e/33Sk+1J1Mtu96bZ9kmUBJ2K1npspdWXWvkXxLmYRhqKm9FKtF+jGnKPdvrcrt5cWKxawuYfb6JvOUlQykycxyDq8/qrTslSX4+XmrTNP9JuLq1bu7YXrN1Z6lM+paaenFU082W+7v/31ZtVFKKfTbuAV3bOe5PTE7RsZgz8vfxVrVKQbxXo0CbN66VJPn4+in0mtb5truuXRfH9pZN69Spa5/SDq3YkpOTtHyJPemw2Wyq16CxiyMCnBs9erSSkpLsl4MsXqzOnTs79vXp00dNmjTR2LFjFRUVpcmTJ2vChAlF6t9ms+nOO+/UK6+8oubNm+fa369fP918880aMmSIMjIy9NRTT2n37t0F/g/x8/PTtddem+/+8ozE/SoTc9T+AbBS9cayueX/9FepdbEUM+uYy7Fg2sOKORalxPgYefkEqlL1xmp0TR+16/OoAivVLna/W1ZSJo+ycdi8ODpSx/AssG0dw1ORZqIyJB1Vqurp4hJdh83Ui+2Ufz9uhqGa8tABpeqIUvNthyvfrgNHJEmNateQu3v+5bgh9S++l+46GF0qsazcfPG636b1c793r9++27F9TeN62rhjjyZ+/rWWbtyqzEz7F19VggJ1W5/OemH40ALL6HH1OnChjLxOvYZyd8//s0r9RhdXkTlgwdLz9LQ0xZw6oS2R6/S/6VN0+OBeSdLA2+4r9mR6cDHDsN9crQxiWL9+vVassFeyjBw5MkfSnmXMmDGaMWOGduzYoQ8//FAvvfSSPIpwCVSXLl3UpUuXAtsMGjRIt912m77//nvt3btXkZGRuu66/Acfr2SlXiq/bds2vfbaa7rxxhtVp04deXl5yd/fX02aNNHw4cO1du3a0g4BF6SnJisxPkZS/rO4Z/HxqygPLz9JUtzpI5d97gM7lul87DFlZqQp6fxpRe9drxU/vamPnm+miD8/L1afsacO6GCUvSSzbpMuqlSdb69RemJ08ZrhKk6+88y+P/txknT6ws/eMvKcuC5nP/Z/fueUoTQzs0jx4sqQnJKq0+fs5Ye1qxa8xGDFAH/5+di/JIo+GVPisWzdc0C/rdkkSbqmUT2FNsj9f2TngYv/L5Zv+lt9//Gy/tywxZG0S1JMbJym/rBIXR58Xlv3HCjxOFG+paQkK/bsaUlSteq1CmwbWCFIPr72zyonjpXOl1VFdTT6kDo2q6SOzSqpa8vqGtS3pf7vuYcVtcNentupWx89/cKrLo4ScG7+/PmO7QcfzHsVBJvNpgceeECSFBsbq/Dw8FKJpXfvi5eJ7d27t1TOUR6U6oj70qVLczzQWVJTU7Vnzx7t2bNHs2fP1r/+9S9NmjSpNEOBpJTki9eeeHr7OW3v6eWntJQEpaYU//raitUaKbTtYNUN7qjAyvZJj86e3KcdET9q+4YflJ6WrLCZT8gwDLXtPapIfW9e9ZV0YVbIVt3uL3aMQGEk6WLi7O3kO8/s+7MfJ0mJF3521ockeRuGLlTnK0mmmMbr6hOfePEyCT8f5xNn+Xp7KyEpJc/12C9HSmqannjrM2Vk2F+/rzxyT57tzsZd/H8x+t2pMgxp/MN3694be6papQrae+S4Pvz6J33561KdOBOru8e9rTUz3lWgX/FnJMaVJTHh4msoKykviLePr5ISE5SUmFCaYV22oIqV9fz/va3e/W5lNnmUCytX2gfH/Pz81LZt23zb9ezZ07G9atUq9evXr8RjSUm5WPV4Nf/9lGrinp6eLj8/Pw0YMEB9+vRRaGioAgMDdfLkSf3999+aMmWKDh48qDfffFMhISH5fpuDkpGedvGDnJt7waW+9jb2kZu01OJ9AAxtO0itut2f6zqU2o3a6dpOdyoqMkxzp9ypzIw0LfrqOTVtc4v8gwpe6iG7ravnS
JLcPbx1TcehxYoRKKw0XRwxdJZAe+jiaz7VNJXtR0c/7nJe5pajH2XKPp89riYp2a4p9/Rw/i/b60Kb5JSSvbxizPvTtWmnfZTjvpt6qn+269ezS0y++P8iOTVNn7/0pO656eKHumYN6+qzcU/Iw8NdM376QwePndK0+Yv1z/sGl2i8KL9SUy6+hjw8nH9W8fS0t0lJscbqG9Wq1dScBfaEJyMjQydPHNPalUv00/df6q2JY3Tk8AGNeORZF0eJYrPZ7DdXK4MYduywr2YSHBxc4CUroaGhuY4padmXeWvWrFmBbXfu3KmOHTtq165dSk5OVpUqVdS2bVvdfvvtuueee4pUym81pZq4t27dWkeOHFFQUFCufTfeeKOefPJJ3XLLLfr99981ceJEPfBA4de0PHKk4PLtY8eOFSfkcivuTLSSE8/muc/bt6ICK9WWu8fF0ZqMdOcf6jLS7d9ueXgWb3kUb98KBe4PaTNAPQe/pPDvJygtNVGbls9Qj1tfLFTfR/as0+nj9mspm1430Om5gMuVPYl2tuhQ9iTf85IvrrL6SZfzNURz9MMiIFclL8+LHzBS05wv8ZZyoY23l/OEp7De/d+PmvnLEklS22aN9d4/86+O8vK8eN5rG9fPkbRnN+GRezXnt2VKSU3T90tWk7hfJU6eOKr4c7F57guoEKRq1WvJM9uSbGlpzj+rpKba21zOUm4lyd3DQ41DLk6yFdKshbr16qdBdzygx4ffqv+8/6oOH9yr/3v9YxdGiStJYXKeOnUKvkT2UsnJyYqJiSnUsRUrVpSfn58SEhJ0+PDhIp2nMDZv3qywsDBJUosWLZwm7idOnNCJExdXPYmOjlZ0dLR++uknvfXWW5o3b57TPqyqVBP3KlWqFLjf09NT77zzjlq3bq2DBw/qr7/+KrAUI7u6dfNfa/Zq9Oe88dq88n957mvV7X4NfmS6vLwvToSSmuy8pCw1xd7G08vfScvia9t7lMJ/mCiZpg7uXCEVMnHfnH1Sum5MSofS55MtcU5WwdebZ9/vc0nCnbVUnLM+JCnZvJi4+xRihB5XngDfiysPJBSi/D1rxNu/EGX1hTF9we+aMNVe3RRSv7a+f3tcgSX7Ab4X9/Vt3zLfdpUrBOi6po20Zusubd17QKlpaazpfhX47IPXFTb/6zz3DRh8j8ZP+iTHkmmFKX9PTkqUVLiyeldq0vQaPTb6Jb397+f0yw9zdEP/2yw9Cz7Kjw4dOjhtY5rOBwuyy760m7+/8zwgK3HPWtqtpKSkpGjUqFHKyMiQJL3++uv5trXZbOrbt6/69++vVq1aqXLlyoqPj9emTZv03//+Vzt27ND27dvVu3dvrV+/XvXq1SvRWMtCmQ7hpKSk6NChQ9q+fbu2bdumbdu25Xghbd68uSzDueq4e3rLx98+uVHc2YIrFpISzirtQuIeWLlo39IVhV9gNfleiCn+bOEmlslIT9Xf676zH1+huhq3KPlraYBLFTTh3KUKmsiu8oWfk2XqvJnhpB/72H4FucnDYMT9auTt5alKFexfukafOl1g27Px55WQZK+Uql2t4C/OC+PbP1bq2ffsk4fWq1FVP7/3f6oSFFjgMdnPW7t6wTFktc3MNHNcG4+rm5eXtyoEVZJkH6EvSNy5WEdyX71m8VeoKSs9+t7s2P5z0U8ujATFZRqGZW6lKTnbZU+ens4ruLy87JfXJiWV7PK1Tz75pCIiIiRJw4cP18CBA/Nt+8MPP+iPP/7QP//5T/Xt21etW7dW9+7dNXr0aG3evFnDhw+XZB+Rf+aZZ0o0zrJS6svBJSQkaMqUKfrmm2/0999/O74xyUtWSUZhOCvFOHbsWKG+gbpSDH5kugY/Mt1pu6q1m+nQrpU6c2KvMjPS810SLuboLsd2lVqhebYpOUV784mKDFNSwhlJUovO98hm47pflL66hpdjorgjZmqBL9sjF5Z8c5NU65Il3+oanhf7UapClfda7hmmqeMXEveCl
o3DlS+0QR2t3rxD+6KPKz09I98l4aKyLQGX11JtRRG2coMeee1jZWaaqlG5on75YLxqVyt4VnvJfg37j+FrJEmZGQVXlWRkXtx/NU82dDUZP+kTjZ/0idN2DRs31V8b1+jIof1KT0/P9/rag9mWgGuQbWk4q6pY8eKXWcePlnxJMa5O69evV82aNUu0T2/vi9VTWZejFCRr8jgfn7w/0xTHpEmTNG3aNElS+/bt9cknBb935HVpdhYPDw9NmzZNa9eu1a5du/Tjjz8qOjpatWtb/wu/7Eo1cT9w4ID69Omj/fv3F6p9Ub6lKeq1GrCrF9JFh3atVFpKgo4e2KQ6jfP+cuPgzuUXj2lS8PqKlyMh7pQSz9u/sAkIKnjZlyybV2Uvk7+vVOICLtVE3nKXoXSZ2mYm6g5VyrNdmmlql5IvHnPJt+LNDR9H4r7NTFKokfc/ud1KVvKFhs3zaYOrQ5cWoVq9eYcSklIUuWuf2l/TJM92K/+6uMZ65xbF/8I1PGKrHnjlfaVnZKhShQD99P7/qVHtwk0c2rXVxesG9x89UUBLaX/0cUmSt6eHKgWW3iVZKH9ate2kvzauUVJignb+/ZeubZX3ZIibIlY7tlte17Gswiu2kycvXots9dJ+5MMwJCtUwGX7bFGzZs0Sz4sCAi5eXluY8veEBHvlS2HK6gvjv//9r8aNGyfJPvndwoUL5ed3eX8z7u7uGjlypMaOHSvJPuHdvffee9mxlqVSfeXdf//92r9/vwzD0EMPPaTFixfr8OHDSk5OVmZmpkzTzDECX9TrL1B0odcNcmz/tXxWnm3MzExHcuztG6QGzXqVWjwbl05zLOlWP7S70/aJ8ae1e/NvkqTq9VqqRr1WpRYbkJ2vYVOrC6Pjm5WoGDPvKerWmPGOJd86G7n/gbWQr/wuvPX+acbl+763xIxzbHfKox9cPW7p3t6x/b9f814jNzMzU3N+s8+6G+Tvpx7XXVOsc63dukt3j3tLKalpquDvqwWTX1LzhoWfU6Zbq2aOcvpfV2/Mt8ruwNET2nJhDfdOLUJls8IszbCMnn37O7Z/+XFOnm0yMzO1cME3kqSAwApq18H5ZwhXW/LbfMd2cLYJ7ACr8fb2VuXK9iorZxOCnz171pG4l8QcZF9//bUef/xxSVL9+vX1+++/O503rbCaN7/4dxcdXbhLdK2k1P5T7ty507H+37hx4zR9+nTdcMMNqlOnjry8vBxLhJ05c6a0QkAeajdur3pNu0mSIpfP0OHda3O1Wf3r+4o5ulOS1LHfk3Jzzz1h0IEdyzTxAU9NfMBT86eOzLU/9tQBHTsQWWAsUZFhWj7fPsmEu6ePWncf7jT+bWvnKjPDnjC16sqkdCg5f2Se08CMKA3MiNKczLwv2xlis4+yZ0j6LPOkMi5Jus+ZGZpp2o/1k039jNyrHXgYhm4xgiRJh5WqH83cq0HsNJP0u3lOknStfBRiWGO2ZLhGu+ZN1OXCSPbsX/7Uum27crWZ8s3P2nWhVP4fd/SXxyWlxcsj/5Z/9zvk3/0OPZrPbNZbdu/X0LGTlJCUIj8fL81760W1adq4SLG6ublp9N23SpIOHT+lN2d9n6tNenqGnn1vmjIz7X8/IwfdUKRz4Mp3Tcu2at22syTpp++/1NbI9bnafDXjYx3Yay+Vv+v+R+Wex+SGG9evVMdmldSxWSX9+8UnSi3eZX+EKebk8QLbRG5YrS8+fVeS5Oburn4Dbi+1eICSkJXk7tmzR+np+c/ts3PnTsf25c7W/tNPP+mBBx5QZmamatasqSVLlpRoNcGlS1SXN6VWKv/33387tu+6665822VNOICyc9N9k/XFa72UnpqkL9/pr24DX1DDZr2Ulpqkbeu+1aZw+/UklWs0Ueebi7fWaGzMQc2adIPqBHdSSJsBqlGvpfwCqkmSzp7ap+0bftD2DT84Rtv73f2WAis5v84kqxLA5uauFl3uKVZsuPL8bSbpmHnxGqw4XRzlO
2am6o/McznaX28r3vKBrQxf9TACtNyM1zolaHzmEd1qq6hKctdBM0Xfmmd06sLEdCOMKvI38r5u9zajklaa8YpWmmaYMTqamaYeRoA8ZWirmaTvzNPKkOQpQw/bqhYrVlxZ3nn6QV3/+MtKSknVoH++pufuv009rrtGSSmpmrdklWb89IckqUndmnr67vwn78nPvujjGjTmdcWet4+ajB91jwL9ffX3vkP5HlO1YgVVq5j7b+kfQ2/W93+u0l9R+zVpxnfafeio7ru5p6oGVdC+oyf0ybe/aN02e8J1Y6c2GtyrU5HjxZXvn+Mm6eH7blZKcpKeHjVUwx99Vm07dFNKSrJ+X/iD5n9rrxqs1yBY9z54eUn5paP6UTu3ObbXrliiY9EX/w7q1Guk1m1zvmaXLVmol/45Ul179lO7Tj3UqEmoAgIqKDU1RdGHD2hF+G9a8tt8ZV6Y12HkP55X/YZ5X/ICazMNm0wLlMqXRQzdunXTihUrlJCQoI0bN6pjx7wvR8m+xnrXrl2Lfb4lS5bozjvvVHp6uipXrqzff/9djRsX7ctjZ7Zvv3hJWa1ahbtE10pKLXHP/s1MVvlEXj777LPSCgH5qNmgjYY+8ZV+/GyEUpLi9Od3/5erTeUaTXTvmAXy8gnIo4fCO7JnrY7syT2qn8XD01c33veu2vbOf13gLDFHd+roPvsXPY2vvUH+FapfVmy4ciw2z+nPbKXl2e1QsnaYOZfRul7FS9wl6WmjuhLNTEUoQVuUpC2ZOefmsEm6y6ikm2xB+fbha9g03lZbEzOjdVRpWmSe0yIz55cLvrJpjK2GGjHaDkmtQhpq1sRnNerVKYpLSHIs0ZZdk7o1Ne/tF3MsIVdYqzfv0KmzF1+DL3w00+kxLz54h1566M5c93t7eWreWy/qjn+9qchd+zRvySrNW7IqV7sbO7XRzInPlvsREJSOps1b6vX3puuVsY8q4Xy8/vP+q7na1GsQrPc++0Z+fpf3WeXVcU/mu2/2tA9z/Dxg8D25EnfJvub80j9+0dI/fsm3Ly9vHz02epzuHVF6o/9ASRk8eLAmTZokSZoxY0aeiXtmZqZmz54tyT45XO/evYt1rtWrV2vQoEFKSUlRhQoVtGjRIl1zTfEu+cpPenq6vvjiC8fPPXr0KNH+y0KpJe5Nmlz8JnHmzJnq1Cn3m9x//vMfLViwoLRCQAGatrlFj72+UesWfazdmxcq7ky03Nw9Val6YzXvcLs6XP+4PLx8i91/zQbXachjM3Vkzzod3b9R52OPKzE+RpmZ6fL2q6hqtZurYfPeuq7XQ/ILrFaoPjev+sqx3bIrk9LBNbwMm15xq62lmXFaYsbpgFJ0XpkKkpuuMXx0ixGU74Rz2dUyPPWhrb7CzFitNON1TGlKl6kqclc7w0+3GhVVzWBda1zUv2s7rZ05WZ9+F6ZFazYp+tQZebq7q1GdGhrSq7Mevf0m+Xp7uTpMSVKNKhUV/tkbmhX2p777Y6V2Hjiic+cTVCkwQO2aB+u+m3vp1h7Wn0wMrtW99036av5Kzf3fZ1q17HedPHFUHh4eqlOvkfreOEh33DdK3j7F/6xSUp58boLatO+iyIjV2rd7p86cPqmzp2Nk2GwKrBCkRsGhatexh/oPuktVqhVuokfA1Tp06KDu3btrxYoVmj59uoYPH67OnTvnaDN58mTt2LFDkjR69Gh5XHLJytKlSx3J/PDhwzVz5sxc5/nrr780YMAAJSQkyM/PT2FhYWrbtm2RYg0PD1ebNm3ynVk+LS1NDz/8sCPWgQMHlsj1+GXNMEtpRjjTNNWyZUtt22YvN7rzzjt1//33q2bNmjpy5Ii+/PJLzZs3T127dtWqVfZv4l955RVNmDChRM5/5MgRxxPy7Af7FFiJWehxZWr34LWuDgEoVb2Xvu7qEIBS9XeVvq4OASgVJ45H69beLSTZl3Iuj6tCZc8pdv70hWpXK5mJ0i5H9MkYhd76kKTSfVwjIyPVt
WtXJSUlyd/fX+PGjVPv3r2VlJSkb775RlOnTpUkhYSEKCIiIsds9JLzxH3v3r3q0qWLTp48KUl6//33df311xcYU7Vq1VStWs5BvxEjRuj777/Xrbfeql69eqlp06YKDAzU+fPntXHjRk2dOtVRJl+tWjWtXbtWDRs2LPbj4iqlNuJuGIb+97//qU+fPjp79qy+/fZbffvttznatGjRQt999125vMYAAAAAAK5Ubdq00dy5czVs2DDFxcU5lmjLLiQkRGFhYbmS9sJYsWKFI2mXpGefdT63Vn4DvefPn9ecOXM0Z07eK1FI9tzzm2++KZdJu1TK67i3bt1af/31lyZNmqRff/1VR48eVUBAgIKDg3XnnXfqiSeekLc3128CAAAAgNUMHDhQW7Zs0YcffqiwsDAdOXJEnp6eCg4O1h133KEnn3xSvr6uvWTlhRdeUOvWrbVmzRpt375dp06d0pkzZ+Tl5aXq1aurXbt2Gjp0qIYMGSI3t7wnDi4PSq1U3tUolcfVglJ5XOkolceVjlJ5XKmutFL5HT/NVO3qFiiVPxGjZreOkFR+H1cUnevXMwAAAAAAAPkicQcAAAAAwMJK9Rp3AAAAALgiGIb95mpWiAFljhF3AAAAAAAsjMQdAAAAAAALo1QeAAAAAJwxDMmwwLgnpfJXJQu88gAAAAAAQH5I3AEAAAAAsDBK5QEAAADACdMwZFqgTN0KMaDsMeIOAAAAAICFkbgDAAAAAGBhlMoDAAAAgDOGzSKzylsgBpQ5nnUAAAAAACyMxB0AAAAAAAujVB4AAAAAnDBlyJTrZ3S3Qgwoe4y4AwAAAABgYYy4AwAAAIATpmGTaYGJ4awQA8oezzoAAAAAABZG4g4AAAAAgIVRKg8AAAAAzrCOO1yIZx0AAAAAAAsjcQcAAAAAwMIolQcAAAAAJ0xDMg3Xr6Fuuj4EuAAj7gAAAAAAWBiJOwAAAAAAFkapPAAAAAA4YRo2mRaY0d0KMaDs8awDAAAAAGBhJO4AAAAAAFgYpfIAAAAA4JQhWWBWeckKMaCsMeIOAAAAAICFkbgDAAAAAGBhlMoDAAAAgDMWmVVeVogBZY5nHQAAAAAACyNxBwAAAADAwiiVBwAAAAAnTBkyLTCjuxViQNljxB0AAAAAAAsjcQcAAAAAwMIolQcAAAAAJ0yLzCpvhRhQ9njWAQAAAACwMBJ3AAAAAAAsjFJ5AAAAAHDGkGRYYEZ3C4SAsseIOwAAAAAAFsaIOwAAAAA4Ycom0wLjnlaIAWWPZx0AAAAAAAsjcQcAAAAAwMIolQcAAAAAJ0zDkGmByemsEAPKHiPuAAAAAABYGIk7AAAAAAAWRqk8AAAAADhhGjaZhuvHPa0QA8oezzoAAAAAABZG4g4AAAAAgIVRKg8AAAAATpgyZMr1M7pbIQaUPUbcAQAAAACwMBJ3AAAAAAAsjFJ5AAAAAHCCWeXhSjzrAAAAAABYGIk7AAAAAAAWRqk8AAAAADhhGpJpuH5Gd9P1IcAFGHEHAAAAAMDCSNwBAAAAALAwSuUBAAAAwClDpqxQp26FGFDWGHEHAAAAAMDCSNwBAAAAALAwSuUBAAAAwAnTsMk0XD/uaYUYUPZ41gEAAAAAsDASdwAAAAAALIxSeQAAAABwwrTIrPJWiAFljxF3AAAAAAAsjBF3AAAAAHDClEUmp2Ps9arEsw4AAAAAgIWRuAMAAAAA8nTw4EGNGTNGoaGh8vPzU6VKldS+fXu98847SkxMLLHz/PrrrxoyZIjq1KkjLy8v1alTR0OGDNGvv/5a6D7S09P12WefqXv37qpatap8fHzUuHFjPfroo/r7779LLFZXoFQeAAAAAJy4Gien+/nnnzVs2DDFxcU57ktMTFRERIQiIiI0bdo0hYWFKTg4uNjnyMzM1COPPKLp06fnuD86OlrR0dGaP3++Ro0apf/+97+y2fIfd46JiVH//v21YcOGHPfv27dPU6dO1axZs/Txxx9r1KhRx
Y7VlRhxBwAAAADkEBkZqbvuuktxcXHy9/fX66+/rtWrV2vJkiV6+OGHJUlRUVEaMGCA4uPji32el156yZG0t2nTRl9//bXWr1+vr7/+Wm3atJEkTZs2TS+//HK+fWRkZGjIkCGOpP22227Tr7/+qnXr1mnKlCmqVq2aUlJS9OijjxZpBN9KGHEHAAAAAOQwevRoJSUlyd3dXYsXL1bnzp0d+/r06aMmTZpo7NixioqK0uTJkzVhwoQinyMqKkrvvvuuJKldu3Zavny5fHx8JEnt27fXrbfeqp49eyoiIkLvvPOOHnrooTxH92fNmqWVK1dKkh5//HF98sknjn0dOnTQzTffrLZt2youLk5PP/20duzYIXf38pUKM+IOAAAAAE6YhiHTsFngVvql8uvXr9eKFSskSSNHjsyRtGcZM2aMmjVrJkn68MMPlZaWVuTzfPDBB0pPT5ckffTRR46kPYuvr68++ugjSfbr199///08+8lK/itVqqR33nkn1/7g4GC9+OKLkqQ9e/boxx9/LHKsrkbiDgAAAABwmD9/vmP7wQcfzLONzWbTAw88IEmKjY1VeHh4kc5hmqYWLFggSQoNDVWnTp3ybNepUyc1bdpUkrRgwQKZppljf1RUlHbs2CFJuvPOO+Xr65tnPyNGjHBsk7gDAAAAAMq1rLJzPz8/tW3bNt92PXv2dGyvWrWqSOfYv3+/jh49mqufgs4THR2tAwcO5Bmrs35q1KihkJCQYsVqBSTuAAAAAOBE1qzyVriVtqwR7ODg4AKvBQ8NDc11TGFt3749z36Kep7i9HP48GElJCQUOlYrKF9X5AMAAAAAJEnHjh1z2qZOnTpF6jM5OVkxMTGFOrZixYry8/NTQkKCDh8+XKTzHDlypNAx1q1b17F96XmK049pmjpy5IijBL88IHEHAAAAgHKoQ4cOTttcek24M9mXdvP393faPitxP3/+fKmdx8/Pz7F96XlKqh+ruyoS92uCDVWpXvolJYAr9F76uqtDAEpVeK+XXB0CUKr++t9gV4cAlIpzp50nfeWJfVZ51+cUpR1DcnKyY9vT09Npey8vL0lSUlJSqZ0n6xx5naek+rG6qyJxBwAAAIArzfr161WzZs0S7dPb29uxnZqa6rR9SkqKJOVayq0kz5N1jrzOc2k/2X8uSj9WR+IOAAAAAOVQzZo1i3wNuzMBAQGO7cKUk2dN8laYsvrinif7RHKXnufSfgpK3Avqx+qYVR4AAAAAnDBNwzK30uTt7a3KlStLyjnxW17Onj3rSIazTyBXGNm/cHB2nuwT0l16nuL0YxhGiX/hUdpI3AEAAAAADs2bN5ck7dmzR+np6fm227lzp2O7WbNmxTrHpf0U9TzF6adu3bo5JqorD0jcAQAAAAAO3bp1k2QvLd+4cWO+7ZYtW+bY7tq1a5HO0bBhQ9WqVStXP3lZvny5JKl27dpq0KBBnrE66+f48eOKiooqVqxWQOIOAAAAAE7ZZFrgVhYp3ODBgx3bM2bMyLNNZmamZs+eLUkKCgpS7969i3QOwzA0aNAgSfaR8LVr1+bZbu3atY6R8kGDBsm4ZFb9kJAQxyj8t99+q8TExDz7mTlzpmN7yJAhRYrVCkjcAQAAAAAOHTp0UPfu3SVJ06dP15o1a3K1mTx5snbs2CFJGj16tDw8PHLsX7p0qQzDkGEYGjFiRJ7neeaZZ+Tm5iZJeuqpp3It0ZaUlKSnnnpKkuTu7q5nnnkmz36ee+45SdKZM2c0duzYXPv37t2rSZMmSZKCg4NJ3AEAAAAA5d+HH34oHx8fpaenq1+/fpo0aZLWrl2r8PBwPfroo44EOSQkRGPGjCnWOUJCQvT8889LkiIiItS1a1fNnTtXERERmjt3rrp27aqIiAhJ0vPPP68mTZrk2c/w4cMd5e+ffPKJhg4dqkWLFmn9+vX6+OOP1aVLF8XFxclms2nKlClydy9/i6uVv4gBAAAAoIyZMmSqdGd0L2wcZaFNm
zaaO3euhg0bpri4OI0bNy5Xm5CQEIWFheVYkq2oXn/9dZ08eVJffPGFIiMjdffdd+dqM3LkSL322mv59uHm5qb58+erf//+2rBhg77//nt9//33Odp4eXnp448/1s0331zsWF2JEXcAAAAAQC4DBw7Uli1b9OyzzyokJES+vr4KCgpSu3bt9NZbbykyMlLBwcGXdQ6bzabp06crLCxMgwYNUq1ateTp6alatWpp0KBBWrhwoaZNmyabreDUtUqVKlq9erU+/fRTdevWTZUrV5a3t7ca/X979x0dVbn1cfw36Z0kdEikI0UQhFAU6SpSpIiCoKKIYBevIpar4rWivuq1XeGCggqooBSFKwhClKL03gLSEiKQQHoyKXPeP4ZMElMmpM0Bvp+1Zq2TnGee2WENydnz7LOfxo11//33a8uWLRo3bly5YnUlVtwBAAAAAEVq0KCB3n33Xb377rsX9LyePXvKMIxSj+/fv7/69+9/oeEV4OHhoQcffFAPPvhgueYxIxJ3AAAAAHDiciuVh7lQKg8AAAAAgImx4g4AAAAATrDiDldixR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJwyZo0y99H3acSlhxR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJwzDIsMwQam8CWJA1WPFHQAAAAAAEyNxBwAAAADAxCiVBwAAAAAnDFlM0lXe9TGg6rHiDgAAAACAiZG4AwAAAABgYpTKAwAAAIATlMrDlVhxBwAAAADAxEjcAQAAAAAwMUrlAQAAAMAJSuXhSqy4AwAAAABgYiTuAAAAAACYGKXyAAAAAOCURYZhhjJ1M8SAqsaKOwAAAAAAJkbiDgAAAACAiVEqDwAAAABO2GSRzQRl6maIAVWPFXcAAAAAAEyMFXcAAAAAcIJ93OFKrLgDAAAAAGBiJO4AAAAAAJgYpfIAAAAA4IRhmGMfdzPEgKrHijsAAAAAACZG4g4AAAAAgIlRKg8AAAAAThgyR0d3w9UBwCVYcQcAAAAAwMRI3AEAAAAAMDFK5QEAAADACbrKw5VYcQcAAAAAwMRI3AEAAAAAMDFK5QEAAADACUMWk3SVd30MqHqsuAMAAAAAYGIk7gAAAAAAmBil8gAAAADgBF3l4UqsuAMAAAAAYGIk7gAAAAAAmBil8gAAAADghCHJ5uogZI8Dlx9W3AEAAAAAMDESdwAAAAAATIxSeQAAAABwgq7ycCVW3AEAAAAAMDESdwAAAAAATIxSeQAAAABwwpBFhlxfpm6GGFD1WHEHAAAAAMDEWHEHAAAAACdoTgdXYsUdAAAAAAATI3EHAAAAAMDEKJUHAAAAACdoTgdXYsUdAAAAAAATI3EHAAAAAMDEKJUHAAAAACdshv3hamaIAVWPFXcAAAAAAEyMxB0AAAAAABOjVB4AAAAAnKCrPFyJFXcAAAAAAEyMxB0AAAAAABOjVB4AAAAAnDEkwzBBmTpd5S9LrLgDAAAAAGBiJO4AAAAAAJgYpfIAAAAA4IRh2B+uZoYYUPVYcQcAAAAAwMRYcb+MnY49ph+//khb1v1PcadOyNPLW3XqN9Z1N9ym/rc9KG8fv3LNf+LIPu3c9IsO7d2sY4d2K/HcGSUlxMnNzV3BobXVtFVHde83Up26D5LF4rzRR052tn5ePFORP81TzNEDykhPUUiNerq6U28NHPGIrmjSulzx4tJw/K8z+s+CZfppw1bFnI6Xt6eHGtWvo2G9umr8sH7y8/Eu89xpGVb9/Md2/bJph7Yd+FN/Rv+llPQMBfn7qml4XfXp1E7jBt+g2tVDSj1nanqGvvrfGi2J/EMHj8coPjFZ1QL8Va9GqLq0uVL9r+uoPp2uLnPMuPglGNk6qAwdNDIUZWQoShlKlk2S1NsSpCfc6lT4a0bakrTSSNJRWZUqm4LlrtYWXw2wBKuFxbdUc2QYNi01ErTWSNZfylKWDNWQhyIsARpkCVYti2eFx41LT8KZY9qw/CMd3PY/JZ49I
Q8Pb4XWbqyrOt+mTjc+KC/v8l2rbI2crYXTxpVq7NAJM3RNjzFFnpv5Sh8d3fdrqeZ5ZW5WqeMDgFwk7pepjb/+qPdfHKO01CTH96wZaTqUtEWH9m3Rz4s+0wvvL1bd8KZlfo0Fn72hyJ/mFXnu1MkjOnXyiNatnK/W13TX5KnfKii4erFzJSXE6ZXHBylq7+aC88T8qRUL/9TqpV9q/KR/64Yh95U5Xlz8lq3brHGvfKCk1HTH99IyrDq3/7C27j+s2T+u0oK3nlWTsLoXPPfuQ8fU96F/KiU9o9C5s0kp2rgnShv3ROnjb3/UB5MmaHif65zOGbl1tx584xMd/+tMge+fOZeoM+cStSPqiNbv3Efifpm7y/Znlb2W1bDpTVusNiu1wPfPKFtrjGT9aiRrpKW67nAr/ve1JJ00MvWyLUYnVTBBiVGWYoxzWmEk6km3OupkCajwnwGXjv1bftSCT8bImp53rZJlTVPMn1sU8+cWbV7zme6atFjV65T9WgW4EDZZZJPru8qbIQZUPRL3y9CfB7bpnedGKdOaLh+/AN16z2S16dBDmdYM/bbiG/28aKZOHj+oVyYO1v998bt8/QPL9Dpu7h5qflUntWh7rRo0vUoh1esoKKSGUpMSFH1sv5Z//18dP7xHe7b+qtf+MURvzIiUm1vhuzdycnL0xqThjqS9S6+hunHIfQqoFqKDuzdq/mdvKPHsaf3njYcUWrO+OlzXr1z/Prg47Th4RGNeek/p1kwF+ProyTuHqvs1rZVuzdSCVes064dVijoRq+FPv6FfZ0xVoF/pVg1zJaWlOZL2rm2uVL9rO+iaFk0UGhSouIQkLf71D836YaWSUtN13ysfKMjfTzd2aV/sfKs379Rtk99URmaWggP8dd/gG3R9+9aqGVJNaRlWHTgWo5/Wb9Hpcwnl+WfBJaamPBQmL21TWqXM/4FxypG0t5WvBrmFKFQeOmZYNd84q1hlaa4RrxCbu/q5BRc5R5ph07/yJe03WarpekugvGXRTiNNC4yzSpNNb9li9ZZbuBpbfCrlZ8HF7eTRbfr2w1HKykyXl0+Aut8yWY1a9VB2VoZ2rf9Gm1fPVHzsQX359mA9+Orv8vYt27VKfmOeWabAkOI/2A0KDXM6R/3GHTR0woxyxwIAf0fifhma8X//UKY1Xe7uHpry4TK1aNvVca5tRC/Vu6KZZn/wjE4eP6hFc97THeNfLNPrPPLP6XL3KPotdnXnPup36wN6+9k79PvqhTqw63dt/m2pOvUYVGjs6qVfaN/2dZKkm4c/oAmTP3Sca966kzpc209P3tVZaalJmvF/T6hd577Fvi4uXZM++Fzp1kx5uLtr8bv/VOerrnSc69mhjZqG1dU///OVok7E6oOvf9DzY2+/oPndLG4a1rurnr3nNrVsFF7ofJ9OV+vGzu11x/NvKyfHpqfen6kd8z4s8jaQM+cSNWbK+8rIzFLbZg218J3nVTs0uMCYrm1b6J5BfZSZRUnl5W6kJVTNLD5qJh+FWDx0ysjSONuRCn+dHUaafjWSJUmd5K/n3OrJ/fz7t7nFR52MAD1hO6YzytYsI07djEAFWNwLzfO9cVYx55P2ey01NMwt1HGuhcVXbQw/PWs7IasM/dd2Rm+4F/7/BCz74h/KykyXm7uHxjyzTFc0z7tWady6l6rXaabl855RfOxBrVv6nnoPL9u1Sn7V6zZTSM2G5ZrD09tftcOvKncswOUgLS1NH330kebPn6/Dhw/LarUqPDxcAwYM0GOPPaYGDRqUa36bzaa1a9fqp59+0vr167V//36dPXtWPj4+uuKKK9S9e3c98MADatu2bYnzTJkyRS+//HKpXnP16tXq2bNnueIuDs3pLjMH92zU3m1rJUl9B99bIGnPNXj0Ewpr1FKS9OPXHyo7u2yJg7Pk2d3dXUPv+ofj673b1xY5btFX70mSAquF6p7HpxY6Xze8qW69Z7IkKfbEIf2+ZlGZ4sXFa/PeK
K3fsU+SdPfA3gWS9lyPjRykKxvUlyT9Z/4yZWVnX9BrdGlzpb54+R9FJu25Bl4foVu6d5Ik/RlzSjsOFp1cTZk2V2cTk+Xn462vX3+6UNKen5cn9wFf7ka71VAnS4BCLJX7geRC21lJkrukB91qOZL2XNUs7rrHUkOSlCqbVhiJhebINgz9aCRIksLlpSGWwv0eWlp8dYOlmiRpt9J10Ch8+wkub9GHNurYfvs1QYee9xZI2nNdO+AJ1axvv1bZ8NOHyinjtQpwIQxZZBgmeFwCpfKHDh1Su3btNHnyZG3evFnnzp1TWlqaDhw4oHfffVdt27bVjz/+WK7XaNiwoXr06KE33nhDkZGROnXqlLKyspScnKw9e/boP//5j9q3b6/JkyfLuAha9ZO4X2b+WLPEcdxnUNENVtzc3NSr/52SpNTkBO3avKbS4vH1yytty8wsfPEWc+ygoo/YE7Lr+g4vtmFe74F3O45/X7O4gqOE2f342ybH8V039ypyjJubm0b16yFJSkhJ1a9b91RKLN2vyVtp+fPkqULnzyWn6NuV9gvSETderyvq1KyUOIALkWbYtEP23hBXy081imkc19USKL/zlw4bjJRC53cqTan5Gue5FdN4tI8lyHH8exHz4PK2b3PetUr7YprBubm5qd319muVjLQEHdm7pipCA1ABkpOTNWDAAEVFRUmS7r//fq1atUrr16/Xa6+9poCAACUlJWnEiBHavn17mV/n5MmTkqSmTZtq8uTJWrJkiTZv3qzffvtN//rXvxQSEiKbzaa33npLzz//fKnm3LVrV4mPiIiIMsfrDPXEl5l9O+wl5z6+/mrSokOx41pfc73jeP+O9Wrf5YZKiee3Fd86jsMaFF4lzY3XHlP3YucJqVFH9a5orpPHD2r/jvUVGyRMb/2u/ZIkf19vtb+ycbHjurVr5TjesGt/pTR9y8zMW/VxL6Jnw0/rtijdmilJGnBdR8f30zKsio07qwBfH9UKDS7VTgtARYlShrJlX224ylJ8l25Pi0VXykfblGZ/jmHII997da+R1xjyqhK6zzeTj7xlkVVGgecAknTsoP1vv5e3v+o1Kv5apWGLvGuVYwfXq2nbyrlWAVCx3n77bR08eFCS9NZbb2nSpEmOc127dlXPnj3Vo0cPpaWlaeLEiVqzZk2ZXqdTp0566aWXdOONNxa6rurWrZtGjRqlrl276syZM3r77bc1btw4NW5c/HWkJF11letuhSFxv8xEH7EnOHXCmpRYyh7WsIXj+MTR/RUaQ1JCnE4ej9LPiz/TLz/MliQFBddQ95tHFRp74s99juP6RST2+YU1vFInjx9U3KkTykhPlY+vf4XGDfM6cDRaktS4fh15eBS+5zZX8/Ol8pJ04FhMpcSydsdex/GV+V4v18a9UY7j1k2u0JZ9h/Tyf+dpzZZdstnsiVON4CAN691Vk8cML7GMHqgoJwyr4zjM4lXi2DCLl7YZacqRdFKZukJ5WyyeMDLzxqn4edwtFtWVp44qU9HKLHYcLk9nYuzXHaF1msjdvfhrlZr18q5Vcp9THgunjVPcyYNKS46Tt2+QQus0UZOr+qhT3wkKCi38+7wocScPaNoL1you9qCyszLkF1hD9Rpdo1YRQ9X22pFy9+D2p4uZYdgfrmaGGMoqKytLH3zwgSSpZcuWevLJJwuNufbaa3Xfffdp2rRpioyM1KZNm8q0kr1+fcmLeU2aNNGLL76oRx99VNnZ2Vq0aJH+8Y9/lPgcV6qUUvm0tDQFBgbKYrFo9OjRTsdv2LBBFotFFotFn3zySWWEBEmZ1gwlJcRJkmrULrkzakBQiCPxjT91otyv/fyEPhoS4akhEZ66+4a6eua+7lq1ZJYMw1BQcA098/Z8BQQGF3pe/Om85MpZzLnnDcNQ/KnocseMi0OGNVPxifaGWvVrlrxFVUhggPx97UlGzOm4Co9l16Gj+mnDVklS68ZXqEXDwu/Z/Ufz3pu/bt2jPg/+U
79s2ulI2iUpLiFJ079frmvvnaRdh45WeJzA38Upr+dDDSef6ec/n/95khR//msfWYpsXFdwHnsCk6gcZRm2C4oXl66szAylJdt/Pzvr4u4bECIvb/u1SlJ8+a9VjuyNVHJCrHJyspSWEq/oQxsVuegNvfdEC21aNb1Uc6QknlL04U3KSEtUdpZVSWdjtH/LD/r+07H6+NmOOh2zz/kkwCVs9erVSky090gZM2ZMkTtKSdI999zjOF64cGGlxdOrV94tlocPH66016kIlbLi7ufnpyFDhuirr77S4sWLlZqaKn//4lc/58yZYw/Gw0O3335hnZ5ReulpyY5jH1/ne+d6+/orIz1V6WmVd//hwBGP6PZxzysouEaR5y8kZu98K+zp6dwzeblITssrs/X3db6tlJ+Pj1LTrUXux14e1swsPTz1U+Xk2BOQl8bfUeS4c0l5783H35kui0V68f6RGnVTD9UKrabD0X/p3/OW6Kv/rdGpswka+dxb2vD5OwryL758GSivdOUlzj5OPtPPfz7/8yQp7fzXzuaQJB+LReer85UuQ6xDQpIyM/L+7nt5O79W8fT2V6Y1VVZr2f/uh9RqrFYRQxTerIuqVbd/WHDu9BHt2bhQezd+p+ysDC2Z+bAkiyL63F/kHBaLmxq37q3m7fqpToOr5RcQKmtGimKPbNWmX2boTMw+nYnZq89fvUETXlmv4BpXlDleuI4hczSGM0MMZbV2bV4z6h49ehQ7rmPHjvLz81NaWprWrVtX7LjyslrzKs7c3Uv+wNnVKq1UfvTo0frqq6+UmpqqxYsXa9SowmXQkpSdna358+dLkm666SbVqFF0Avd30dElr6jGxsZeWMCXgUxrXqLiUYpO1Z6e3oWeV1aPvThDGempMmQoNTlRh/dt1k/fTdey+Z/or5gjeuSf0xRcvbaTmEsu38yN1/487pm8XFjz3VPu5en8V5r3+TEZ1ootz33yvZnaut/+Se3ofj3UP9/96/mlZeS9pzMys/Tf5x/RHf3y/nC1bBSuT597WJ6eHvp8yUodiz2jGYtW6B+jh1RovEB+Wcqr+HD218Ez3wVjpmEo//Vj7jwepbioLDCPbLL3s8flLjsr73dkacrKPc7/7c8uosFtabSKGKL23e8udP9rWJMItel6uw5sXap5792mnJws/e+rp9SiwyAFBtcpNM8dT8yXr39woe83bNFNnW54UItnTNC2X79USuIpLfvySY16Yn6Z4gX+rjQ5T1hYydUrVWnv3rxbClu0aFHsOA8PDzVt2lQ7d+7Uvn2VV6kSGRnpOG7ZsqXT8TfeeKO2b9+uhIQEBQcHq1WrVurXr58mTJigkJDCO6lUpEpL3Pv27atatWrp9OnTmjt3brGJ+8qVK3X69GlJKlVZfa7wcPZ9zS/+dIxSks4VeS4gKETVa9WXl3feamR2KfaGzsqyfwKV/3llVbt+owJft27fTf1ufUBvPTNSm9cu1VNjuurNmb8WKocvGHNmibHkxmt/XvFNkXBp8fbKu7DLzHK+xZv1/Bgf75I/CLoQ73y5ULN+XCVJ6tCyid79x7hix3p75b3uVU0aFEja85syfpTm/hQpa2aWvlu1nsQdlSp/Eu3sr0P+JN/rb8lO7jzZcn4DZoF52OTmspB0NkbpqUVfq/j6hygotL48PPP+zpdmi7fs83/7PbzKdq3i41etxPNXXjNAPYf9U6vmv6Qsa5q2rPlcPYc8W2hcUUl7LncPTw2+f7pORG1UXOwB7du0SElnY0p93zxQkk6dOjkdY6atznIXX/39/RUcHFzi2PDwcO3cuVNnzpyR1WqVt7d3ieMvVFpamt5//31Jkre3twYPHuz0OT///LPj+MyZM4qMjFRkZKSmTp2qWbNmlWqOsqq0xN3Dw0MjRozQhx9+qBUrVig+Pl7Vqxe+/zS3TD4gIKBSf9BL3VefvKDVS78s8lyvAXfp8SmfFdh6LaMUpeTW9FRJkq+f81K1svDy9tFjL
83Q/YOaKO7UCc3+8Fk9+WrBn+HvMZeUuOfGK0m+pbgVAJeGQL+8D2lSS1H+nrviHVCKsvrSmLn4Z02ZPleSvfndd289V2LJfqBf3rk+EW2LHVe9WqCuubKxNuw6oF2HjyozK4s93VFpfPMlzhkq+X7z/Od9/5Zw524V52wOScrIdyHpexGXfaL0Vn77grb9WvS1Svvud2nYA5/JyyffNrGlKH/Pstr/9nuXoqy+rDr2HqdfFkyRYRg6uu9XqYjE3Rl3dw916Hmvls97RpJ0ZN+vuvq6om+pgnnZDPvD1cwQQ1klJ9tvhwkIcP5/Nv+t1ikpKRWeuE+ePFnHjx+XJD388MOqV69esWPbtGmjIUOGqFOnTqpXr56ysrJ04MABzZkzRytWrFBCQoJuvfVW/fDDD7r55psrNM5cldpVfvTo0frwww+VlZWlb7/9Vg8++GCB8+np6Vq0aJEkaciQIfLzK/09nCdOlNyEJDY2tlSfQF1OvLx9FFitupIT4xXnpHlbStI5ZZxPhKvXrrzqhqDgGmpx9bXa8cdKbYxcouzsLHnkK42rXivv0+i4U9HF3gufe16SLBaLqjtpZIdLh4+3l0KrBepsYrJizsSXOPZccopS0+2rM/Vrle62nJJ8u3Ktnnj3v5KkK+rU1A/vvqAawUElPqd+rRrSHntn+fq1S47BHuMB2WyGziWlqHb1yi3BwuXr7w3nmpUwtqRGdtXPf50hQylGTokN6uLOr+1Xk7s8Lay4w87Ty0d+AdWVlhKvpLMlX6ukp5xT5vnEPah65V2rBFSrJd+A6kpLjlPS2ZNlnqdmWF4ZbnI55gHy27hxo+rWrevqMEot4/wCipeX88rH/Il6enrF3gY7Z84cffTRR5LsJfKvvvpqsWMnTpyoKVOmFPp+586ddffdd2vatGl64IEHlJOTo3Hjxunw4cPy8amYBaL8KjVx79y5s5o0aaLDhw9rzpw5hRL3JUuWKCXF/mnqhZTJS+a6V8MMHp/ymR6f8pnTceGNW2rvtrX6K/qwcrKzi90SLjrfFnDhDYu//6QiVDufjFsz0pSUEKfQGnm/fMIb5/2Rizl2QI2vbFfsPNFHD0iSatQOZyu4y0yLhmFav2Of/oz5S9nZOcVuCXcw3xZwRW3VdiGWrt2k8a9+JJvNUJ3qIfrx/RdVv1bJXe0l+z3sC1dvkCTZckpelcyx5Z03e8MUXNzCLd6ORnHRRqZKWgCPPr/lm7uken/b8i3c4pU3jzLVQkXftpRjGPrrfOJe0rZxuLQMe+AzDXvA+bVKzbCWOrZ/rc7+dVg5OdnFbgl35mTetUrN+pV7rfL3e+DLNAeVJagEdevWrZS8qCLe859//nmB7vCSHAltZqbzXkP5G8f5+lbcbbBr1qzRfffdJ0kKDQ3Vd999V+L8zkr6J0yYoE2bNmnmzJk6efKkvvvuuwvObUuj0j/izg16/fr1Onr0aIFzuWXytWrVUt++fSs7FEhqefV1kqSM9FQd3r+l2HF7tv7mOG5x9bWVGlP8mbxPnf9e4p4brz2mX4ud41zcXzp5/KCkyo8X5nNtG/sFW2q6VdsO/FnsuLXb8xqidG1T9ou81Zt36e6X3lN2To5CqwVqyXsvqHH9ws2KinLd1XkfRh05earEsUdi/pIk+Xh5KjSI2z9QeZrJx9FQbreRVuy4LMPQAWXkPedvF3atLHkXPruN4ldHopShjPMZfv7nAJLUoLn9b3+mNVUnjxR/rXJ0f961SoPmlfe3PzXpTN4WdSFlX9nMvxVcYDnmgQsZFhkmeMi4eD8ECgy03w6Tu3hbktTUvNtgS1NaXxqbN2/WLbfcIqvVqoCAAC1btqxUTemcmTBhguM4f8O7ilRlibthGJo3b57j+2fPntXy5cslSSNGjJBHMSu/qFide97iOF71w+wix9hsNq1e9pUkyT8wWG069qy0eOJORevArt8lSTXrNpCvf2CB8/UbNFdYI/t/pnUrF8iaUfQF5S8/f
uE47tKTXgmXm4HXRziOv/zf6iLH2Gw2zf3J/os0OMBf3a9pXabX+n3XAY18bqqsmVmqFuCnxf/3vFo1Kn2JZrerWzrK6f+3fotycnKKHHf05CntPL+He5c2LYrd5xSoCH4WN119fnV8h9IUZxTdFGyDkezY8q2rpfBFVBv5yf/8pcUvRlKxDZFWGUmO4y5FzIPLW8uOedcq2yKLv1bZ/pv9WsXHL1iNWvWstHg2/TLD8V5u2LJ7mebIycnW1shZjq8btry+IkIDKs2+ffvK/Rg6dGiheXOrA1JTU5WQkFBiDLm3RtesWbNC7m/fs2eP+vXrp+TkZHl7e2vRokXq3LlzueeVpFatWjmOY2JiShhZdpV+Jdi8eXN17GjfFmnu3LmO7y9YsMBRIlEZpQQoWvPWndSqfTdJ0srFn2v/zg2Fxiye856ij9g/FR448tEC95zn2rUlUkMiPDUkwlP/njK20PmYYwe1c1PRCVSu1JREvfvCXcrOsr8PevW/s8hxQ+58QpKUnHhWsz94ptD52OjD+m7WVElS3fCm6tJzSImvi0tPx1bNdO35lewvfvxFf+w+UGjMB1//oAPnS+UfvK2/PP/2YeGv2/Yo4PrbFHD9bZrw2kdFvs7OqCMa/vQbSk23yt/XWwumPqv2Vza5oFjd3d31+Ej7Renxv87ozdnfFRqTnZ2jJ96dIdv57jP3Db7hgl4D+LuVtkQNyjmoQTkHNdcWV+SYoW6hkqQcSZ/aTivnb0l3opGjWYb9uf5y042Wwt24PS0WDbQES5JOKFMLjcIdxPcb6frZSJQkXSVfNbdU/H2AuLiFNe2kBi3s1ypb1nyu4wcLX6usX/qezpxfwe7a79Eit447sjdSL4zy1AujPPX9p4WvVc6dOaqTR7eVGMuBrUu15nv7va+eXr66pseYQmP+3LNG6akJxc6Rk52lxf8d74j3ymsGqlol3pMPVIQWLVqU+1GtWuG/E/kT3P379xc6nys7O1uHD9u32a2IFfHDhw/rhhtuUHx8vDw8PPTNN9+oT58+5Z43V0XcWuBMlSxzjx49Wps3b9bu3bu1c+dOtW3b1lEm36RJkwr7pAOlM+7Jd/XMfT2UaU3XlEf7a/g9z6hNxx7KtGbotxXfaMXCGZKkelc015DRT5TpNc7GxerFh25Uw2Zt1bnnYDVpcY1CqteWu4eHzsWd0v6d67Vy8ec6F28vBb6iSWvdes/TRc7Va8DdWrVklvbtWK9l8/+jc/GndOOQ++QfFKyoPZv07czXlZaaJDc3N4178r1i79vHpe3tx+5V34f+qXRrpgb/41U9ddcwdb+mtdKtmVqwap0+X7JSktQsvK4eGznoguf/M+YvDX7yNSWk2Mu2Xhx3h4IC/LTnz+PFPqdmSDXVCin8R+vB4Tfru1/WafvBI3rj8/mKOn5So2/uoZrB1fTnyVP6+Nsf9cdu+60fN3VpryE9u1xwvLh07DHSFWvk3QuYpLwqjVgjUyttiQXG93UreXur4lxt8VN3S6B+NZL1h1L1oi1at7iFKFQeOmZY9a1xVmfON6a7x1Kj2MZzwyyhWmskK0ZZ+tyI00lblrpbAuUli3YZ6ZpvxCtHkpcsut+tZplixaWv/93vasaUHsrKTNfsN/ur++Bn1LhVD2VlZmjXhm+0+Rf7tUr1us113YCyXasknDmmz17tq/BmXXTlNQNV94q28q9mf0+ePX1Ee/74Xns3fudYbb9p9NQit3Db/tsXmvN/Q9XimoFq1KqHatS9Ut6+gcrMSNHJI1u16ZeZOhNjv1XLP6iWBtz9bpnihesZhv3hamaIoay6devmOI6MjFSXLkVf42zevNlRKn/dddcVOaa0oqOj1bdvX8XGxsrNzU2zZ8+u8N3M8u9PX1J3+vKokgxn5MiReuqpp5STk6M5c+YoNDRUv/1mvy+J1faq1/jK9nrq9bl6/8UxSktN0lef/LPQmHpXNNcL7y8uVLp+oY5G7dTRqJ0ljunYrb8ef
XGGvH2K3lXA3d1dz77znV55fJCi9m7Whl++14Zfvi8wxtPLW+Mn/VsdrutXrnhx8bq6eSPNfvkJjXvlAyWlpju2aMuvWXhdLXjr2QJbyJXW+h37dOZcXoI0+cNZTp/z7L236fmxtxf6vo+3lxZMfVa3PfOmth34UwtWrdOCVesKjbupS3vNevmJKvkUF+a1wkjUL/lKy/PbpwztMwpug9hXZUvcJekxS22lGTZtVqp2Kl07bQXvU3eTNMISqn5uwcXO4Wdx04tu9fWyLUYnlaXlRqKWGwU/XPCTm550q6PGrLajGPUattftj87Vgk/GyJqepJXfFL5WqV63ue6atFjevuW7VjkR9btORP1e7HlPbz/dfOc7iuhzf7FjMjNStHP919q5/utix9QOv0q3PzpHIbUalSte4GLWs2dPVatWTYmJiZo9e7aefvrpIq9zZs2a5TguquS+tE6fPq2+ffs6eq19+umnGjVqVJnnK860adMcxz169Kjw+aUqStzr1Kmj3r176+eff9a8efNUvXp1x6eXJO6u0an7QL0/b6t+/PpDbV77P8WfjpaHp5fqhjXRtX2Ha8DtDxWbSJdGy6uv1UsfLtOOjat0eN8WxZ+OUUL8KVkz0uQXEKRa9Rrqyqs66/qbRhRoQFecoOAaenPmb1qxaIZ+Xf61oo/slzUjVSE16qltRC8NGvmormhStnuWcenof11H/T7r//TJ/KVavmGrYs6clZeHhxqH1dHQnl014dZ+8vOp2D1Ay6pOjRCt/vR1zV76i+avXKv9R6OVmJKq0KBAdWzVVKNv7qlbulONhKrlbXHTS+71tcaWpFVGko7KqhTZFCx3tbb4aqAlWC1K0UyunsVL/3ZroKVGgtYayYpVlrJlqIY81NHir1ssIaplKVzaDOTXosNAPfLmVm346UMd2P4/JZ2Nlru7l6rXaaLWnYer840Pycu77Ncq9Rpdo+EPzdaJqN8Vc2SLkhP+UlpynGw52fL1D1GtsFZq3Lq3OvQaq4BqtYqdp9ugSarT4GqdiPpDp2P2Ki0pTukpZ+Xu6a2AarVVr1EHte48TK0ihsjNjR1CcHnz8vLSY489pldeeUX79u3TO++8o0mTJhUYs2HDBs2cOVOSPQmOiIgoaipHwt+gQYNCTdAlKSEhQTfddJMOHLDfQvnee+/p/vuL/wCuKLt27ZKvr6+aNm1a7Jjp06drxgx7FVCdOnXK9UFDSSxGcZ1jKtjs2bMd2wEEBwcrISFBHTt21KZNmyrl9aKjoxUebr9/aMaPR1SDfb1xieqbscjVIQCVanXP510dAlCptn+5x9UhAJUiMT5a7zxqrzA4ceLERbmdc/6c4vNl5sgp4k5F697+F++/a3Jysjp27KiDB+23BY4fP14jR46Ur6+vVq9erddff10pKSny9fXV+vXr1a5duyLnKSlxt1qt6t27t9avXy/Jvlj8zDOFe2Xl5+/vr0aNClbEzJo1S+PGjVOvXr108803q02bNqpevbqys7O1f/9+zZkzRytWrJBkrxJeuHChBg268FsyS6PKbgYeNmyYHnzwQaWnpzs6CLLaDgAAAACXj8DAQC1dulT9+/dXVFSUpk+frunTpxcYExQUpDlz5hSbtDsTGxvrSNol+zbkuT3WitOjRw+tWbOm0PdzcnK0cuVKrVy5stjnVq9eXTNnzqy0pF2qwsQ9MDBQgwYN0rfffivJ/onEyJEjq+rlAQAAAAAm0LRpU23btk0ff/yx5s+fr0OHDikzM1Ph4eHq37+/Hn/8cTVo0MDVYap///6aOXOmNmzYoG3btunUqVOKj4+XYRgKDQ3V1VdfrX79+umee+5RUFBQpcZSpe23v/nmG33zzTdV+ZIAAAAAUG50la9Y/v7+evrpp/X000XvLOVMSXd8N2zYsMTzpVWrVi2NHTtWY8cW3lKyqlX6Pu4AAAAAAKDsSNwBAAAAADCxKi2VBwAAAICLkWFYZBiF9xx3RRy4/LDiDgAAA
ACAiZG4AwAAAABgYpTKAwAAAIATNsP+cDUzxICqx4o7AAAAAAAmRuIOAAAAAICJUSoPAAAAAE4Yhv3hamaIAVWPFXcAAAAAAEyMFXcAAAAAcMoiQ2bYQ90MMaCqseIOAAAAAICJkbgDAAAAAGBilMoDAAAAgBM2mWMPdZurA4BLsOIOAAAAAICJkbgDAAAAAGBilMoDAAAAgBPs4w5XYsUdAAAAAAATI3EHAAAAAMDEKJUHAAAAACcolYcrseIOAAAAAICJkbgDAAAAAGBilMoDAAAAgBM2wyKbYXF1GKaIAVWPFXcAAAAAAEyMxB0AAAAAABOjVB4AAAAAnKCrPFyJFXcAAAAAAEyMxB0AAAAAABOjVB4AAAAAnKBUHq7EijsAAAAAACZG4g4AAAAAgIlRKg8AAAAAThiGZDNBmTql8pcnVtwBAAAAADAxEncAAAAAAEyMUnkAAAAAcMIwLDIMi6vDMEUMqHqsuAMAAAAAYGKsuAMAAACAE+zjDldixR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJ2wyxz7uNlcHAJdgxR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJ+gqD1dixR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJyiVhyux4g4AAAAAgImRuAMAAAAAYGKUygMAAACAEzbD/nA1M8SAqseKOwAAAAAAJkbiDgAAAACAiVEqDwAAAADOmKSrvMwQA6ocK+4AAAAAAJgYiTsAAAAAACZGqTwAAAAAOGGz2R+uZoYYUPVYcQcAAAAAwMRI3AEAAAAAMDFK5QEAAADACcMkXeXNEAOqHivuAAAAAACYGIk7AAAAAAAmRqk8AAAAADhBqTxciRV3AAAAAABMjBV3AAAAAHDCJslmgtVutnG/PLHiDgAAAACAiZG4AwAAAABgYpTKAwAAAIAThmHIMEFnODPEgKrHijsAAAAAACZG4g4AAAAAgIlRKg8AAAAATrCPO1yJFXcAAAAAAEyMxB0AAAAAUKXS0tL01ltvKSIiQqGhofL391eLFi305JNP6tixY+We/+jRo7JYLKV63HPPPaWac968ebrxxhtVp04d+fj4qEGDBrrzzju1YcOGcsfrDKXyAAAAAOCEYZNsNldHYY/jYnfo0CH1799fUVFRBb5/4MABHThwQDNmzNCcOXM0cOBAF0VYUHp6uoYPH65ly5YV+P7x48c1Z84czZs3Ty+++KJeeumlSouBxB0AAAAAUCWSk5M1YMAAR9J+//33a+TIkfL19dXq1av1xhtvKCkpSSNGjNC6devUrl27cr/mq6++qsGDBxd7PiQkpMTnjx071pG09+rVS48//rjq1aunXbt26fXXX9fhw4c1ZcoU1a1bV+PHjy93vEUhcQcAAAAAVIm3335bBw8elCS99dZbmjRpkuNc165d1bNnT/Xo0UNpaWmaOHGi1qxZU+7XrF+/vq666qoyPfeXX37R119/LUkaNGiQFi5cKHd3d0lSRESEbrnlFnXo0EHHjx/X5MmTddtttzn9IKAsuMcdAAAAAJzI7SpvhsfFKisrSx988IEkqWXLlnryyScLjbn22mt13333SZIiIyO1adOmKo3x79555x1JkoeHhz755BNH0p6rRo0amjp1qiQpISFBM2bMqJQ4SNwBAAAAAJVu9erVSkxMlCSNGTNGbm5Fp6P5m8UtXLiwKkIrUnJyslatWiVJ6tu3r8LCwoocN2zYMAUFBUmqvHhJ3AEAAAAAlW7t2rWO4x49ehQ7rmPHjvLz85MkrVu3rtLjKs6mTZuUmZkpqeR4vby81KVLF8dzsrKyKjwWEncAAAAAcMJmmOdxsdq7d6/juEWLFsWO8/DwUNOmTSVJ+/btK/frfvjhh2ratKl8fHxUrVo1tW7dWg888IC2bt1aIfHmP5+dnV2oW35FoDkdAAAAAFyEYmNjnY4prrzbFaKjoyVJ/v7+Cg4OLnFseHi4du7cqTNnzshqtcrb27vMr5s/Q
bdardq7d6/27t2radOmacKECfr3v/9d5Py58UrO/x3Dw8MdxydOnFCrVq3KHG9RSNwBAAAA4CLUqVMnp2MME3WzS05OliQFBAQ4Hevv7+84TklJKVPiHhwcrKFDh6pnz55q1qyZfHx8FBsbqxUrVmjmzJlKSUnRtGnTlJycrDlz5hQbb2li/nu8Fe2ySNy77/9A9U8HuToMoFLs7vq4q0MAKtX2L4e4OgSgUrW7q7WrQwAqRZxR8ff5upJZOrqbIYayysjIkGS/J9yZ/Il6enr6Bb9WvXr1FBMT47hXPlf79u3Vv39/Pfzww+rbt6+OHz+uuXPnasSIEbrllluKjLc0MZc3Xme4xx0AAAAALkIbN27UiRMnSnyUhcViKfdj1qxZheb18fGRJEfDt5JYrVbHsa+v7wX/DF5eXoWS9vyaNWumr776yvH1hx9+WGhMbryS85jLG68zl8WKOwAAAABcaurWrWuqe9idCQwMlFS6UvLU1FTHcWlK68vi+uuvV6tWrbR3716tXbtWNputwBZ1ufFKzmOu7HhJ3AEAAADACcMwZJigpXtV3LNeEZ3c69atW+h7YWFh+uOPP5SamqqEhIQSG9TlVgvUrFmzXI3pnMlN3DMyMhQfH6+aNWsWiDdXdHS0Onbs6DReqWCjuopC4g4AAAAAcHC29VlZtWrVSt99950kaf/+/Y69z/8uOztbhw8fliS1bNmyUmLJZbFYij2XvzP8/v37S5wn97yHh4eaNWtWMcHlwz3uAAAAAIBK161bN8dxZGRkseM2b97sKD2/7rrrKjWm3L3avb29Vb169QLnIiIiHE3pSoo3MzNTv//+u+M5np6eFR4niTsAAAAAOGEzzPO4WPXs2VPVqlWTJM2ePbvYsv/8je2GDh1aafGsW7dOe/bskWT/UCH//e2S/R73Pn36SJJWrlxZYF/3/L7//nslJSVVarwk7gAAAACASufl5aXHHntMkv0++nfeeafQmA0bNmjmzJmSpB49eigiIqLIuXK71zds2LDI84sWLSqxH8ChQ4c0atQox9cPPfRQkeOeeuopSfby/Ycfflg5OTkFzsfFxWny5MmS7PvGjxs3rtjXLA/ucQcAAAAAZ0yyj7vMEEM5TJo0Sd98840OHjyop59+WocOHdLIkSPl6+ur1atX6/XXX1d2drZ8fX31/vvvl/l1hg4dqqZNm2rYsGHq1KmTwsLC5O3trdjYWC1fvlwzZ850dIq//fbbNWzYsCLn6d27t0aOHKmvv/5aS5Ys0Q033KCJEyeqXr162rVrl1577TUdP35ckjR16lSFhISUOeaSkLgDAAAAAKpEYGCgli5dqv79+ysqKkrTp0/X9OnTC4wJCgrSnDlz1K5du3K91qFDh/TWW2+VOObBBx/Ue++9V+KYzz77TElJSVq2bJlWr16t1atXFzjv5uamF154QePHjy9XvCUhcQcAAAAAVJmmTZtq27Zt+vjjjzV//nwdOnRImZmZCg8PV//+/fX444+rQYMG5XqNJUuWaMOGDfrjjz907NgxxcXFKTU1VUFBQWrcuLGuv/56jR07VldddZXTuXx9fbV06VLNnTtXs2bN0o4dO5SQkKDatWvr+uuv1yOPPKKuXbuWK15nSNwBAAAAwAmbzZDNBJ3hzBBDRfD399fTTz+tp59+ukzPd7af/aBBgzRo0KAyzV2cUaNGFbgvvirRnA4AAAAAABMjcQcAAAAAwMQolQcAAAAAJwyTdJU3Qwyoeqy4AwAAAABgYiTuAAAAAACYGKXyAAAAAOAEpfJwJVbcAQAAAAAwMRJ3AAAAAABMjFJ5AAAAAHDCZhiymaBO3QwxoOqx4g4AAAAAgImRuAMAAAAAYGKUygMAAACAE4bN/nA1M8SAqseKOwAAAAAAJkbiDgAAAACAiVEqDwAAAABOGDJkmKCjuyHXx4Cqx4o7AAAAAAAmRuIOAAAAAICJUSoPAAAAAE4YNslmgo7udJW/PLHiDgAAAACAi
ZG4AwAAAABgYpTKAwAAAIAThmGSrvImiAFVjxV3AAAAAABMjBV3AAAAAHDCZtgfrmaGGFD1WHEHAAAAAMDESNwBAAAAADAxSuUBAAAAwAnDZsgwQZ26GWJA1WPFHQAAAAAAEyNxBwAAAADAxCiVBwAAAAAnDMP+cDUzxICqx4o7AAAAAAAmRuIOAAAAAICJUSoPAAAAAE7YDEM2E3R0t1Erf1lixR0AAAAAABMjcQcAAAAAwMQolQcAAAAAZwxDhhnK1M0QA6ocK+4AAAAAAJgYiTsAAAAAACZGqTwAAAAAOGHY7A9XM0MMqHqsuAMAAAAAYGIk7gAAAAAAmBil8gAAAADghM2QbCbo6G5zfQhwAVbcAQAAAAAwMRJ3AAAAAABMjFJ5AAAAAHDCMAwZJiiVN0MMqHqsuAMAAAAAYGIk7gAAAAAAmBil8gAAAADghM1myGaClu5miAFVjxV3AAAAAABMjBV3AAAAAHDCMOwPVzNDDKh6rLgDAAAAAGBiJO4AAAAAAJgYpfIAAAAA4IRhGDJM0BiOfdwvT6y4AwAAAABgYiTuAAAAAACYGKXyAAAAAOCEYRiymaBMnVL5yxMr7gAAAAAAmBiJOwAAAAAAJkapPAAAAAA4YdhM0lXeBDGg6rHiDgAAAACAiZG4AwAAAABgYpTKAwAAAIATlMrDlVhxBwAAAADAxEjcAQAAAAAwMUrlAQAAAMAJm2F/uJoZYkDVY8UdAAAAAAATI3EHAAAAAMDEKJUHAAAAACcMwyRd5Q3Xx4CqR+J+mTkel6BPVv6h5TujFH02Sd6e7mpUM1TDIlppQu9O8vP2rPDXTLNmKeKFT3Q0LkGSdEX1atr39sQix762aI1eXxJ5QfM/d0sPPT+kZ/mCxCUpNuaEvv1qmtZF/qxTf8XIy8tL9cMbqU+/wbpt1Dj5+PqVa36bzaajfx7U3l1btXfnVu3dvU2HDuxRVlamJOmT2UvUoVM3p/OsXbNC+3Zv1d5d23Qy+qjOnY1XSkqS/Pz8VS+soTp0uk5Dbh+jBo2alSteXHoSzhzThuUf6eC2/ynx7Al5eHgrtHZjXdX5NnW68UF5eZfvPb41crYWThtXqrFDJ8zQNT3GFHlu5it9dHTfr6Wa55W5WaWOD5eGBCNbB5Whg0aGoowMRSlDybJJknpbgvSEW50Kf81IW5JWGkk6KqtSZVOw3NXa4qsBlmC1sPiWao4Mw6alRoLWGsn6S1nKkqEa8lCEJUCDLMGqZan4ayoAly8S98vIsu0HdN9/Fyop3er4Xlpmls6lntTWoyc1+9dt+m7iKDWpHVqhr/vKotWOpL0yNKtTvdLmxsXrt9U/6aWnJyg1JdnxvYz0NCUlbtO+3du0ZMFXevfTrxXeoHGZX+N/S77Rv559uFxxZmdn68kHRxZ5LjkpUQf27tCBvTv07Zz/avyjz2rM/RPL9Xq4dOzf8qMWfDJG1vQkx/eyrGmK+XOLYv7cos1rPtNdkxarep2mLowScO4u259V9lpWw6Y3bbHarNQC3z+jbK0xkvWrkayRluq6w63ka4uTRqZetsXopAp+0BSjLMUY57TCSNSTbnXUyRJQ4T8DgMsTiftlYvuxWN396QKlZ2YrwNtLTw3opu4tGio9K1sL/titz3/dqqhT8br1/bn67cX7FejrXWGv+/HPv8vH00Oe7m5Kzsgscfz9vSM0pGOrEsfk2Gy6aeosJaVbFeTrrUHXtKiQWHHpOLB3p57/x32yZqTLzy9AY8ZPVIdO3WS1ZmjFsu+1eP4XOn70kP7xwEjNWrBK/v6BZXqd/KVqHp6eatKslXKys3To4N4LmicgMEjXdOqm1m07qH5YA9WoWUc+vr46c/ovbd24Vj98P0cpyUn65N1/KTCwmoaNvLdM8eLScfLoNn374ShlZabLyydA3W+ZrEateig7K0O71n+jzatnKj72oL58e7AefPV3efuW7T2e35hnlikwpG6x54NCw5zOUb9xBw2dMKPcs
eDSVVMeCpOXtimtUub/wDjlSNrbyleD3EIUKg8dM6yab5xVrLI014hXiM1d/dyCi5wjzbDpX/mS9pss1XS9JVDesminkaYFxlmlyaa3bLF6yy1cjS0+lfKzoOoZhmGKMnUzxICqR+J+mXh63k9Kz8yWh7ubljx5pzo3DXec69mykZrUDtU/569U1Kl4fbB8Q4WUnufYbHpk1g/KsRl67pZumv3bNqeJe60gf9UK8i9xzPKdUY6qgaEdW8nXi1I0FPTu68/KmpEudw8PfTBjgdq07+Q417FLd4U3aKyP3pmi40cPae7nH+v+R54p0+s0anKlnnz+TbW8qr2at2wjb28f/fejNy8ocffw8NCKDYfl7u5e6FxLSd1736zb7xyvMcN7KSkxQdM/fEODb7u7yPG4fCz74h/KykyXm7uHxjyzTFc07+o417h1L1Wv00zL5z2j+NiDWrf0PfUe/mK5X7N63WYKqdmwXHN4evurdvhV5Y4Fl5aRllA1s/iomXwUYvHQKSNL42xHKvx1dhhp+tWwV2F1kr+ec6snd4tFktTc4qNORoCesB3TGWVrlhGnbkagAiyFf9d+b5xVzPmk/V5LDQ1zy6tUbGHxVRvDT8/aTsgqQ/+1ndEb7uGF5gAgpaWl6aOPPtL8+fN1+PBhWa1WhYeHa8CAAXrsscfUoEGDcs3fsGFDHTt27IKec+TIETVs2LDA96ZMmaKXX365VM9fvXq1evbseUGvWVp0lb8MbP4zRusOHpckjbm+fYGkPdfjN12rFnVrSJI+WfmHsrJzyv26H//8h7Ydi1XzOtX1j/7O7/MtrbnrdzqOR117dYXNi0vDnp1btH3LBknSLbfeWSBpzzX63kfUsElzSdI3X05TdlbZ7qlt3baDbr9zvNq0i5C3d9lXVJwl4fXCGqhPvyGSpHNn43Tsz4Nlfi1c/KIPbdSx/WslSR163lsgac917YAnVLN+S0nShp8+VE42943DvEa71VAnS4BCLJW7nrTQdlaS5C7pQbdajqQ9VzWLu+6x2K+FUmXTCiOx0BzZhqEfjQRJUri8NMQSUmhMS4uvbrBUkyTtVroOGhkV+FPAlQybZLMZLn8YNlf/S5TfoUOH1K5dO02ePFmbN2/WuXPnlJaWpgMHDujdd99V27Zt9eOPP1ZpTNWqVVOdOhXfU6OikLhfBn7Ytt9xfNd17Yoc4+Zm0R3nk+CEtAxF7j9artc8HpegVxetliT9++6B8vKomNXBpHSrlm63/zwNawTruuZXVMi8uHRErlrmOB44dFSRY9zc3NR/sP2+8uSkRG3e+FuVxFYefv5590laM60ljMSlbt/mJY7j9sU0g3Nzc1O76++UJGWkJejI3jVVERpgWmmGTTuULkm6Wn6qUUzjuK6WQPmdvzzeYKQUOr9TaUrN1zjP7W/Jf64+liDH8e9FzANczpKTkzVgwABFRUVJku6//36tWrVK69ev12uvvaaAgAAlJSVpxIgR2r59e5lfZ8WKFdq1a1eJj/fee88x/vbbb5ePT8kLMc7mi4iIKHO8zlAqfxnYEGVfbff39lT7hvWKHXf9lXnlKL8fOq6+VzUp82tO/GqZUq1ZuqNrW3Vv0bDM8/zdwk17lJ6ZLUm649q2shTzBxOXrx1bfpck+fr5q0XrdsWOu6bjtY7jnVv/UJfreld2aGWWkZGuX1f9T5I9IbuiYdn/b+Lid+zgOkmSl7e/6jXqUOy4hi2uz/ec9Wra9oZKjw0wqyhlKFv2+4KvshS/24KnxaIr5aNtSrM/xzDkke9aY6+R7ji+qoTu883kI29ZZJVR4DkApLffflsHD9qrB9966y1NmjTJca5r167q2bOnevToobS0NE2cOFFr1qwp0+s0b97c6ZhXXnnFcXz33Xc7HX/VVa673YvE/TJw4GScJKlxrVB5uBdfZNH8fKl8/ueUxfw/dmv5ziiF+PvojRE3lnmeolAmD2eOni8jD7uikTw8iv8V16Bx3i/zoyYsP
c/OylLcmVPaue0PfTnzA504dliSNGjY6DI308Ol4UyMveootE4TubsX/x6vWS+vcWfuc8pj4bRxijt5UGnJcfL2DVJonSZqclUfdeo7QUGh9Us1R9zJA5r2wrWKiz2o7KwM+QXWUL1G16hVxFC1vXak3D3oWYLKccLIq1QKs3iVODbM4qVtRppyJJ1Upq5QXsPeE0Zer54wFT+Pu8WiuvLUUWUqWiX398HFg+Z05ZeVlaUPPvhAktSyZUs9+eSThcZce+21uu+++zRt2jRFRkZq06ZNlbKSnZiYqCVL7FVsjRs3VrduFXdrb2Wo9FL5WbNmyWKxlPoxZcqUyg7pspKRla24FHtn1vohQSWODfH3lf/5fdyjzyWVOLY451LT9fS8nyRJ/7q1r2o6aTR3IY7FJWhdlL3BRNem4Wpcq2K3rcPFz2rNUMK5eElSrdrFV5dIUlC1YPn62d+fp2JjKj220jgZc1ydW4aqc8tQXde2tgb3aasXnrpfB/ftkiR16dZbj01+xcksuJRlZWYoLdn+waqzLu6+ASHy8ra/x5PiT5T7tY/sjVRyQqxycrKUlhKv6EMbFbnoDb33RAttWjW9VHOkJJ5S9OFNykhLVHaWVUlnY7R/yw/6/tOx+vjZjjods6/ccQJFiVO247iGk3Wr/OfzP0+S4s9/7SNLkY3rCs5jv6ZKVI6yLoWbkoEKsHr1aiUm2vtHjBkzRm5uRaej99xzj+N44cKFlRLLt99+q4wMew+K0qy2uxor7pe45Hx7tgf4lPwJsyT5eXsp1ZqlFCfd34vz/Lc/63RSqjo3CdO9Pa4p0xzFmbd+h3I/YBx9HavtKCwtNe8+wtykvCQ+vn5KT0tVelqq07GuFBxSXZNeeEu9bryFbvKXucyMZMexl7fz/aE9vf2VaU2V1Vr2e2xDajVWq4ghCm/WRdWq2z8sOHf6iPZsXKi9G79TdlaGlsx8WJJFEX3uL3IOi8VNjVv3VvN2/VSnwdXyCwiVNSNFsUe2atMvM3QmZp/OxOzV56/eoAmvrFdwDfqXoGKlKy9x9nGybpX/fP7nSVLa+a+dzSFJPhaLzlfnK12GqCcBpLVr1zqOe/ToUey4jh07ys/PT2lpaVq3bl2lxPLFF19IkiwWi+66665KeY2KVOmJ+5AhQ9SxY8cSx0yaNEk//WRfpS1v238UZM3O+6TYsxQX/N7nm8hllKHL9toDx/TF2m3ycHfTv+8eWOH3n3+9wb7q6OPpoWERrSt0blwaMq15nXs9PZ1/UOXlZR9jtZqj42+tWnU1d7H9D1pOTo5On4rV72tXacl3X2nqy08q+sRR3TP+CRdHCVfKzsp7r5amrNzD017im51Ztvd4q4ghat/97kK/z8OaRKhN19t1YOtSzXvvNuXkZOl/Xz2lFh0GKTC4cEfeO56YL1//4ELfb9iimzrd8KAWz5igbb9+qZTEU1r25ZMa9cT8MsULFCdLeaXFzv7neCrv/Z5pGMr3pWMeDzm/xikwj2yy97PHxcywGTJsri9TN0MMZbV3b96WuS1atCh2nIeHh5o2baqdO3dq376Kr8Y6cuSI4wOBbt26qXHjxqV63o033qjt27crISFBwcHBatWqlfr166cJEyYoJKTwLhMVqdIT9+DgYAUHBxd7/uOPP3Yk7aNHj9a9995bqnmjo6NLPB8bG1vqGC9l3vnu8c3Kcb7Fm/X8NnA+nhf2ubA1K1uPzv5BhiE91Lez2oTXvrBAndh4OFpRp+wl0APbX6lqfmXfegsXp9OnTio5MaHIc4HVglWrdj155duSLSvLedVIZqZ9THm2cqtIHp6eatK8lePr5i3bqFvPGzX4trv10Jhb9J/3XtGJY4f1wmsfuTBKVJakszFKTz1X5Dlf/xAFhdaXh2fee7U0W7xlZ9mrrjy8yvYe9/GrVuL5K68ZoJ7D/qlV819SljVNW9Z8rp5Dni00rqikPZe7h6cG3z9dJ6I2Ki72gPZtWqSkszGlvm8eK
I38SbSz/zn5k3yvv31olTtPtpwnTgXmYSMnVJLS5DxhYSXfWlWVcnM4f3//EnNESQoPD9fOnTt15swZWa1WeXt7lzj+QnzxxReOXgEXUib/888/O47PnDmjyMhIRUZGaurUqZo1a5YGDx5cYTH+nUtL5VetWqWJEydKkjp16qQZM2aU+rnh4YX3Ikdhgb55b/DSlL+nWe1jSlNWn99bP/6mg3/FKyw0SP8c0vOCnlsac9fvcBzfQVO6y9Kn77+mpYvmFXluwJA79OIbHxfYMq005e8Z6fb+D6Upq3elZle21gOPP6+3/vWUfvx+rm7oP8zUXfBRNiu/fUHbfv2yyHPtu9+lYQ98Ji+fvMaEmaUof8+y2v8feJeirL6sOvYep18WTJFhGDq671epiMTdGXd3D3Xoea+Wz3tGknRk36+6+ro7KjpUXMZ88yXOGSr5fvP8533/lnDnbhXnbA5JysjXQMy3FCv0QFl06tTJ6RgzNbNLTrbf8hUQ4Pzvkr9/3vVZSkpKhSbuX35p/3vr6+ur22+/3en4Nm3aaMiQIerUqZPq1aunrKwsHThwQHPmzNGKFSuUkJCgW2+9VT/88INuvvnmCoszP5cl7lFRUbrtttuUnZ2t+vXra9GiRU73zcOF8/H0UPUAX8WnpCvGScO5c6npSrXaP4cOc9LI7u/e/Z+91KRXq8Zatr3oDt1p5+dOs2Zp/h+7JUk1g/zVs2WjEufOzM7Rdxv3SJJqBfnrhnJsU4dLm7e3j6oFhyox4axOnzpZ4tikxARHcl+7rvlX9rr3uVlv/espSdIvy5eQuF+mPL185BdQXWkp8Uo6W3LlWXrKOWWeT9yDqlfeh90B1WrJN6C60pLjlHS25P93JakZ1tJxnFyOeYCi/L3hXLMSxpbUyK76+a8zZCjFyCmxQV3c+bX9anKXp4UV90sBpfLll9sMLvd2xZLkT9TT0ytuW8X169fr8GH7bj2DBw9WUFDJec/EiROLbKDeuXNn3X333Zo2bZoeeOAB5eTkaNy4cTp8+HCl5LUuSdwTEhI0aNAgnTt3Tr6+vlq8eLHq1q17QXOcOFFyh9zY2NhSfQJ1OWhRr6bWHTyuP0+fVXaOrdgt4Q7G5m0Bd2W9GkWOKU7m+RL7L9du15drt5c4Ni4lTfdM+06Sfe94Z4n7/3Yc1NlU+3/WEV3ayL2Y7pO4tL34xsd68Y2PnY5r1ORKbd+yQdHHjyg7O7vYLeGO5dsCrmFj5/t8ulpISN7/yb9Olr9DOMxn2AOfadgDnzkdVzOspY7tX6uzfx1WTk52sVvCnTmZtwVczfrF30dYESqip4mFFUlUonCLt6NRXLSRqZLebtHnt3xzl1Tvb1u+hVu88uZRplqo6L3ccwxDf51P3EvaNg4or40bN15wHlUaFfF7/fPPPy/QHV6SI6HNvV2xJFZrXpNtX9+i/6+VRW5TOsne2d4ZZyX9EyZM0KZNmzRz5kydPHlS3333nUaPHl3eMAup8gwoJydHI0aM0IEDB2SxWDRr1ix16NDhgucJCwsr8VEZb+CLVddm9u68qdYsbTta/CrGbweOOY67NDVPR9/8ZfLs3Q5nru7QRZK9VH7/nu3Fjtu6eb3juO01nSs7rHI7fTrvHjazl/ajcjVofp0kKdOaqpNHthQ77uj+3/I959pKiyc16UzeFnUhZf/bm38ruMByzAMUpZl8HA3ldhtpxY7LMgwdUEbec/6WvLSy5CUPu43iVwCjlKGM8xl+/ucAFa1u3bpO8yIzCQy03/KVkuL8dq/U1LzbHktTWl8aVqtV3377rST7v90NN9xQIfNOmDDBcRwZGVkhc/5dla+4P/HEE1qxYoUk6YUXXijVPQUon0HtW+idpfZO1V+u266IJoX/A9tshuadT5CD/XzUo0XDC3qN1M9ecjqm5aT3dTw+UVdUr6Z9b08s1bzxKWlavjNKktQmvLbaXlG4WzGQX48+/TV7+nuSpB8XztVVVxfe1cJms2nZ4
q8lSYFB1dSx0/VVGmNZrPppkeO4ab4Gdrj8tOx4i35dMlWStC1ytsKbFv7gyWazaftvX0mSfPyC1ahVz0qLZ9MvMxz3TzZs2b1Mc+TkZGtr5CzH1w1bmv//JC4ufhY3XS1fbVGadihNcUaWalgKN+LdYCQ7tnzraimcKLSRn/zlplTZ9IuRpFuNkCJXJlcZebcndiliHlycbDJkM8H94rZSNEcsr4ro5F7UQmpYWJj++OMPpaamOjqzFye3wrpmzZoVdn/7Dz/8oHPn7I1gR40aVWHb7LZqlXdtFhMTUyFz/l2VrrhPnz5dH374oSRp+PDhRd4rgIrXsXF9XdfcvoI++7dt+uNQ4TLbfy9fr/3nS+Uf6ttZnh4F38S/7j8q/7Evy3/syxo/c1Glx5xr/h+7lZVj/wPKajtKo3XbDmrXoaskacl3X2nXto2Fxsz5/CMdPWwvlR9x1wR5FLGLwpaNa9W5Zag6twzVv559uNLijVy5VHGn/ypxzLZN6/XZJ+9Iktw9PHTjgFsrLR6YX1jTTmrQopskacuaz3X84IZCY9YvfU9nzq9gd+33aJFbxx3ZG6kXRnnqhVGe+v7TsYXOnztzVCePbisxlgNbl2rN969Kkjy9fHVNj8Ilh3/uWaP01IRi58jJztLi/453xHvlNQNVrRLvycelaaUtUYNyDmpQzkHNtcUVOWaoW6gkKUfSp7bTyvlbApZo5GiWYX+uv9x0o6XwrgqeFosGWoIlSSeUqYVG4Z0g9hvp+tlIlCRdJV81t9DDCRefFi1alPtRrVrh/0P5E9z9+/cXOp8rOzvbcR96y5Ytix13oS60TL60Knob7KJU2Yr7mjVr9Mgjj0iS2rdvr9mzZ1fJDwi7t+7op75vfKb0zGzd8n9f6amB3dSjRSOlZ2Zpwcbd+ixyqySpWe3qeuymri6ONk9umbyHu5tGdGnj4mhwsfjHc2/o/tE3y5qRrsfGDdeYCU+oQ6dusloz9POy77Xo29mSpCsaNtWoe8uXlP+4cG6Brw/u3+04/v23VYqNOe74OuyKxmp3vpQ/V+SqZXr+H/fpuh43qmOX7mrcrIUCA6spM9OqmBNH9dvqn7Tqp0Wy2ewfYN334CQ1aFRSWyVcDvrf/a5mTOmhrMx0zX6zv7oPfkaNW/VQVmaGdm34Rpt/se/SUr1uc1034IkyvUbCmWP67NW+Cm/WRVdeM1B1r2gr/2o1JUlnTx/Rnj++196N3zlW228aPbXILdy2//aF5vzfULW4ZqAateqhGnWvlLdvoDIzUnTyyFZt+mWmzsTY9/X1D6qlAXe/W6Z4cfHaY6Qr1si73zVJedvXxhqZWmlLLDC+r1vJ2xQW52qLn7pbAvWrkaw/lKoXbdG6xS1EofLQMcOqb42zOnO+Md09lhrFNp4bZgnVWiNZMcrS50acTtqy1N0SKC9ZtMtI13wjXjmSvGTR/W41yxQrcKnq1q2b4zgyMlJdunQpctzmzZsdpfLXXXddhbz2mTNnHNuQt2vXTm3aVFxukX9/+nr16lXYvPlVSeJ++PBhDR8+XFlZWapdu7YWL14sPz+/qnhpnNeuQV198cBw3fffhUpKt2rKd78UGtOsdnV9N3FUgS3kXOlAbJy2HLHfk9+ndRPVrkapGUrnylZt9dq7M/XS0xOUmpKs/7z3SqExVzRsqnc//Vr+/oFFzFB6rzz3SLHnvpjx7wJfDxhyR6HEXbLvOb9m5Y9as/LHYufy9vHVA48/p1H3VN7qPy4e9Rq21+2PztWCT8bImp6kld/8s9CY6nWb665Ji+XtW773+Imo33Ui6vdiz3t6++nmO99RRJ/7ix2TmZGineu/1s71Xxc7pnb4Vbr90TkKqVVyw1JcelYYifrFKHrnm33K0D4jo8D3+qpsibskPWaprTTDps1K1U6la6et4H3qbpJGWELVzy242Dn8LG560a2+XrbF6KSytNxI1HKj4IcLfnLTk2511JjV9ksKXeXLr2fPnqpWr
ZoSExM1e/ZsPf3000Uu5s6aNctxPHTo0Ap57Xnz5ikry940siJX2yVp2rRpjuMePXpU6Ny5Kj1xT0xM1KBBgxQfHy9vb28tWrSIPdhdpH+7K/XHyw/o45V/aPmOKMWcS5KXh7sa1wrV0IhWeqB3J/l5Fy6ndJV5BZrStXVhJLgYXd+rn+YsWqtvvvxU6yJ/1ulTJ+Xp6amwKxqrz02DddvocfLxdf0HiI88NUXtI67Vts3r9WfUfp2NP61z8XGyuLkpqFqwGjdtoY6du6v/4BGqUYseD8jTosNAPfLmVm346UMd2P4/JZ2Nlru7l6rXaaLWnYer840Pycu77O/xeo2u0fCHZutE1O+KObJFyQl/KS05TracbPn6h6hWWCs1bt1bHXqNVUC1WsXO023QJNVpcLVORP2h0zF7lZYUp/SUs3L39FZAtdqq16iDWnceplYRQ+TmVjH3GgLF8ba46SX3+lpjS9IqI0lHZVWKbAqWu1pbfDXQEqwWpWgmV8/ipX+7NdBSI0FrjWTFKkvZMlRDHupo8dctlhDVKuIeeuBy5+Xlpccee0yvvPKK9u3bp3feeUeTJk0qMGbDhg2aOXOmJHsSHBERUeRcuQl/gwYNdPToUaevnVsm7+HhoVGjRpUq3l27dsnX11dNmzYtdsz06dM1Y4a90q1OnToV9kHD31kMo3I7LNx9992ODe4nT56sO++8s8TxtWrVUq1axV8AlFZ0dLTjA4KD7zyh+qEXti85cLHY3fVxV4cAVKoV28q3YgyYXbu7Wrs6BKBSxBlZutd2RJK90ZjZOpyXRv6c4tbHt8g/qHLKoC9EatJJffdv+65cF+O/a3Jysjp27KiDB+39hsaPH6+RI0fK19dXq1ev1uuvv66UlBT5+vpq/fr1ateuXZHzXEjivnfvXrVubf9dO3DgQP3www+linXWrFkaN26cevXqpZtvvllt2rRR9erVlZ2drf3792vOnDmOxuvu7u5auHChBg0aVKq5L1Slr7gfP553f+fUqVM1derUEse/9NJLNK0DAAAAYCqGYaiS1zxLHcfFLDAwUEuXLlX//v0VFRWl6dOna/r06QXGBAUFac6cOcUm7Rcqf1O6u++++4Kem5OTo5UrV2rlypXFjqlevbpmzpxZaUm75ILt4AAAAAAAl6+mTZtq27Zt+vjjjzV//nwdOnRImZmZCg8PV//+/fX444+rQYMGFfJaNptNc+bMkSQFBwfrlltuKfVz+/fvr5kzZ2rDhg3atm2bTp06pfj4eBmGodDQUF199dXq16+f7rnnHgUFVW6Fd6Un7mvWrKnslwAAAAAAXET8/f319NNP6+mnny7T80tbeeDm5ubYE/5C1apVS2PHjtXYsYW3Ta1qrLgDAAAAgBOGzZDNBB3dL+au8ig7N1cHAAAAAAAAikfiDgAAAACAiVEqDwAAAABOGDbDFGXqZogBVY8VdwAAAAAATIwVdwAAAABwwjDMsYe6CUKAC7DiDgAAAACAiZG4AwAAAABgYpTKAwAAAIAThmGTYbO5OgwZhutjQNVjxR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJ2w2QzYT7KFuhhhQ9VhxBwAAAADAxEjcAQAAAAAwMUrlAQAAAMAJwzBkGK4vUzdDDKh6rLgDAAAAAGBiJO4AAAAAAJgYpfIAAAAA4IRhM2SYoKO7GWJA1WPFHQAAAAAAEyNxBwAAAADAxCiVBwAAAAAnKJWHK7HiDgAAAACAiZG4AwAAAABgYpTKAwAAAIAThmyyGTZXhyFDro8BVY8VdwAAAAAATIzEHQAAAAAAE6NUHgAAAACcMGzm6Ohugmp9uAAr7gAAAAAAmBiJOwAAAAAAJkapPAAAAAA4YdgMk5TKuz4GVD1W3AEAAAAAMDFW3AEAAADACcMwZBiuX+02Qwyoeqy4AwAAAABgYiTuAAAAAACYGKXyAAAAAOCEzWaTzeb6TdTNEAOqHivuAAAAAACYGIk7AAAAAAAmR
qk8AAAAADhhGCbZx52u8pclVtwBAAAAADAxEncAAAAAAEyMUnkAAAAAcMIwbDIM13d0N0MMqHqsuAMAAAAAYGIk7gAAAAAAmBil8gAAAADghGEzSVd5E8SAqseKOwAAAAAAJkbiDgAAAACAiVEqDwAAAADO2ExSpk5T+csSK+4AAAAAAJgYiTsAAAAAACZGqTwAAAAAOGEzbLIZrq9TN0MMqHqsuAMAAAAAYGIk7gAAAAAAmBil8gAAAADghGEzTNFV3gwxoOqx4g4AAAAAgImRuAMAAAAAYGKUygMAAACAE4Zhk2FzfUd3g67ylyVW3AEAAAAAMDFW3AEAAADACZrTwZVYcQcAAAAAwMRI3AEAAAAAMDFK5QEAAADACcOwmaIxnBliQNVjxR0AAAAAABMjcQcAAAAAwMQolQcAAAAAJ2w2yWaCju4m2EoeLsCKOwAAAAAAJkbiDgAAAACAiVEqDwAAAADO2GwyzFCnboYYUOVYcQcAAAAAwMRI3AEAAAAAMDFK5QEAAADACcMwZJigq7xhuD4GVD1W3AEAAAAAMDESdwAAAABAlUhJSdGvv/6qd955R7fffrsaNWoki8Uii8Wihg0bVsprrl+/XnfeeacaNGggHx8f1alTRzfddJPmzZt3QfPMmzdPN954o+rUqSMfHx81aNBAd955pzZs2FApcedHqTwAAAAAOGEYNhmG6zu6myGG8hg0aJDWrFlTZa83ZcoUvfLKK7Ll68Z/6tQprVixQitWrNCcOXO0YMEC+fj4FDtHenq6hg8frmXLlhX4/vHjxzVnzhzNmzdPL774ol566aVK+zlYcQcAAAAAVIn89+iHhobqxhtvVEBAQKW81rRp0/Tyyy/LZrOpSZMmmjlzpjZu3KhFixapV69ekqSlS5dq7NixJc4zduxYR9Leq1cvLVq0SBs3btTMmTPVpEkT2Ww2TZkyRdOnT6+Un0NixR0AAAAAUEVGjRqlCRMmKCIiQk2bNpUkNWzYUCkpKRX6OmfPntXkyZMlSVdccYV+//131ahRw3F+4MCBGjp0qH744QfNmzdP48ePV8+ePQvN88svv+jrr7+WZK8WWLhwodzd3SVJERERuuWWW9ShQwcdP35ckydP1m233aaQkJAK/VkkVtwBAAAAwCnDZpjmcTEbP3687rjjDkfSXllmzJihxMRESdLUqVMLJO2S5O7urk8++cSRhL/99ttFzvPOO+9Ikjw8PAqMz1WjRg1NnTpVkpSQkKAZM2ZU6M+Ri8QdAAAAAHBJWbRokSQpKChIw4YNK3JMWFiY+vbtK0latWqVkpOTC5xPTk7WqlWrJEl9+/ZVWFhYkfMMGzZMQUFBkqSFCxdWRPiFkLgDAAAAAC4ZmZmZ2rhxoySpa9eu8vLyKnZsjx49JElWq1WbN28ucG7Tpk3KzMwsMK4oXl5e6tKli+M5WVlZ5Yq/KCTuAAAAAOCEYbOZ5oGSHTx4UDk5OZKkFi1alDg2//l9+/YVOLd3794ix5U0T3Z2tqKioi4o3tK4ZJvTZWdnO47/SkguYSRwcTv1V4yrQwAqVWJ85XSaBcwizqj4lRnADM4aedfj+a/NL1aZ1rOuDkFSwThiY2Odji+uvPtSFh0d7Th29vOHh4c7jk+cOFFh87Rq1apUsZbWJZu4nzlzxnHc/dXKaRAAmMN7rg4AAACgRGfOnFHDhg1dHUa57Fz7gKtDKKRTp05Ox+Tffu1ykf9edWdbzfn7+zuO/97ZvqLmqQiUygMAAAAALhkZGRmO45Lub5ckb29vx3F6enqlzFMRLtkV9zZt2jgaEtSsWVMeHpfsj2oasbGxjk/9Nm7cqLp167o4IqBi8R7HpYz3Ny51vMerXnZ2tqMKtk2bNi6Opmzq1KlTqHzaLGJjYystz7FYLOWe4/PPP9c999xT/mDKwMfHx3Gc21yuOFar1XHs6+tbKfNUhEs2m/Xx8VFERISrw7hs1a1b97K8nwaXD97juJTx/saljvd41
bnYy+M9PDxM+14xa1xmEBgY6Dh2VraemprqOP57OXxFzVMRLtnEHQAAAABw4f7eXb0sXFnVkv9DjfwN5oqSv6Iif4O5oubp2LFjmeapCCTuAAAAAAAHZ1ufmV3z5s3l7u6unJwc7d+/v8Sx+c+3bNmywLn8neFLO4+Hh4eaNWt2oSE7RXM6AAAAAMAlw8vLy9HTYsOGDSXenx4ZGSnJ3lzu7yvqERERjqZ0ueOKkpmZqd9//93xHE9Pz3LFXxQSdwAAAADAJWXIkCGSpKSkJH3//fdFjomOjtbKlSslSX369ClwT7tkv8e9T58+kqSVK1cWW3b//fffKykpSZI0dOjQigi/EBJ3AAAAAMBF4+jRo7JYLLJYLOrZs2eRY8aNG6dq1apJkp555hnFx8cXOJ+Tk6OHHnpIOTk5kqRJkyYVOc9TTz0lyb5LwsMPP+wYnysuLk6TJ0+WJAUHB2vcuHFl/rlKwj3uAAAAAIAqcejQIa1du7bA93I7tqekpGjWrFkFzvXr10916tS54NcJDQ3V1KlT9cADD+jYsWPq3Lmznn/+ebVp00YnT57U+++/r9WrV0uS7rjjjmI/AOjdu7dGjhypr7/+WkuWLNENN9ygiRMnql69etq1a5dee+01HT9+XJI0depUhYSEXHCspUHiDgAAAACoEmvXrtW9995b5Ln4+PhC51avXl2mxF2SJkyYoJMnT+qVV17R4cOHNXbs2EJj+vfvr88++6zEeT777DMlJSVp2bJlWr16tSPhz+Xm5qYXXnhB48ePL1OcpUHijgoTFhYmwzBcHQZQaXiP41LG+xuXOt7jwOXp5Zdf1k033aSPP/5Yv/32m06dOqXg4GBdffXVuvfee3XHHXc4ncPX11dLly7V3LlzNWvWLO3YsUMJCQmqXbu2rr/+ej3yyCPq2rVrpf4cFoPfYAAAAAAAmBbN6QAAAAAAMDESdwAAAAAATIzEHQAAAAAAEyNxBwAAAADAxEjcAQAAAAAwMRJ3AAAAAABMjMQdAAAAAAATI3EHAAAAAMDESNwBAAAAADAxEndUiFmzZslischisejo0aOuDgcAAAAALhkk7gAAAAAAmBiJOwAAAAAAJkbiDgAAAACAiZG4o1zWrFkji8Wie++91/G9Ro0aOe53z32sWbPGdUEC5bB79269+uqruummmxQWFiZvb28FBASoWbNmGjNmjH7//XdXhwhcsLS0NAUGBspisWj06NFOx2/YsMHx+/yTTz6pggiB8svff6c0jylTprg6ZAAoloerAwAAs1qzZo169epV6PuZmZk6dOiQDh06pC+++ELPPPOM3njjDRdECJSNn5+fhgwZoq+++kqLFy9Wamqq/P39ix0/Z84cSZKHh4duv/32qgoTAACcR+KOcomIiNCuXbu0ePFi/fOf/5QkLV++XPXq1SswrlGjRq4IDyiX7Oxs+fv7a8CAAerdu7datGihoKAgnT59Wnv27NEHH3ygY8eO6c0331Tz5s0LVJ4AZjd69Gh99dVXSk1N1eLFizVq1Kgix2VnZ2v+/PmSpJtuukk1atSoyjCBMhsyZIg6duxY4phJkybpp59+kiQ1aNCgKsICgDKxGIZhuDoIXPxmzZrlSFqOHDmihg0bujYgoALExcXJw8NDwcHBRZ7PzMzUwIED9fPPP6tBgwY6fPiw3N3dqzZIoIyys7NVv359nT59WgMGDNCPP/5Y5LiffvpJN998syRp7ty5uuOOO6oyTKDSfPzxx3rkkUck5X2QBQBmxT3uAFCMGjVqFJu0S5KXl5fefvttSdKxY8e0ffv2qgkMqAAeHh4aMWKEJGnFihWKj48vclxumXxAQIAGDx5cZfEBlWnVqlWaOHGiJKlTp06aMWOGawMCACdI3AGglKxWq44fP669e/dq9+7d2r17t/IXLe3YscOF0QEXLrcxXVZWlr799ttC59PT07Vo0SJJ9rJjPz+/qgwPqBRRUVG67bbbHFUnixYtko+Pj6vDA
oAScY87AJQgNTVVH3zwgb7++mvt2bNHOTk5xY6Ni4urwsiA8uvcubOaNGmiw4cPa86cOXrwwQcLnF+yZIlSUlIkqVTd5wGzS0hI0KBBg3Tu3Dn5+vpq8eLFqlu3rqvDAgCnWHEHgGIcPXpUbdq00XPPPaedO3eWmLRL9tVJ4GKTm5CvX79eR48eLXAut0y+Vq1a6tu3b1WHBlSonJwcjRgxQgcOHJDFYtGsWbPUoUMHV4cFAKVC4g4Axbjrrrt05MgRWSwWjR07VitWrNCJEyeUkZEhm80mwzAKJPP0+sTFKDdxNwxD8+bNc3z/7NmzWr58uSRpxIgR8vCgSA8XtyeeeEIrVqyQJL3wwgtsbQjgokLiDgBF2L9/v9auXStJeu655zRz5kzdcMMNCgsLk7e3tywWiyR7cgNczJo3b+7YMmvu3LmO7y9YsECZmZmSKJPHxW/69On68MMPJUnDhw/XlClTXBsQAFwgEndUiNwkBrhU7Nmzx3Gc23m7KJs3b66KcIBKlZuY7969Wzt37pSUVybfpEkTde7c2WWxAeW1Zs0ax7Zv7du31+zZs7luAXDRIXFHhcjfjdVqtbowEqBiZGdnO45TU1OLHffpp59WRThApRo5cqTc3d0l2RP26Oho/fbbb5JYbcfF7fDhwxo+fLiysrJUu3ZtLV68mN0RAFyUSNxRIfJ3ZD18+LALIwEqRrNmzRzHs2bNKnLMf/7zHy1evLiKIgIqT506ddS7d29J0rx58zR37lxHzwYSd1ysEhMTNWjQIMXHx8vb21uLFi1SeHi4q8MCgDKh0wwqRPv27eXj46OMjAy98MIL8vT0VIMGDeTmZv9sqH79+vL19XVxlEDptW/fXldddZV2796tadOm6dy5c7rrrrtUt25dRUdH66uvvtKCBQt03XXXad26da4OFyi30aNH6+eff9aJEyf0xhtvSJI6duyo5s2buzgyoGweffRR7du3T5I0ceJEBQQEaPfu3cWOr1WrlmrVqlVV4QHABbEYtEFGBZk8ebLeeuutIs+tXr1aPXv2rNqAgHLavn27evfurXPnzhV5vk2bNlq+fLnq1asnSXrppZdoeISLVnJysmrXrl1gW8P33ntPEydOdF1QQDn07NlTkZGRpR7P73AAZkapPCrMm2++qf/+97+6/vrrFRoa6rhfErhYtWvXTtu3b9cDDzygBg0ayNPTU6GhoerUqZPeeecdbdy4scBtIsDFLDAwUIMGDXJ87e7urpEjR7owIgAAkIsVdwAAAAAATIwVdwAAAAAATIzEHQAAAAAAEyNxBwAAAADAxEjcAQAAAAAwMRJ3AAAAAABMjMQdAAAAAAATI3EHAAAAAMDESNwBAAAAADAxEncAAAAAAEyMxB0AAAAAABMjcQcAAAAAwMRI3AEAAAAAMDESdwAAAAAATIzEHQAAAAAAEyNxBwAAAADAxEjcAQAAAAAwMRJ3AAAAAABMjMQdAAAAAAATI3EHAAAAAMDESNwBAAAAADAxEncAAAAAAEyMxB0AAAAAABMjcQcAAAAAwMRI3AEAAAAAMDESdwAAAAAATOz/AacCOgYZyHLIAAAAAElFTkSuQmCC\", \"__metadata__\": {\"image/png\": {\"width\": 503, \"height\": 434}}}"
+ }
+ ]
+ },
+ {
+ "id": "mfOT",
+ "code_hash": "17be40e791fcb936ab77687ed79579e0",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "'analytical' "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "bMrW",
+ "code_hash": "71e47d37fdf094a6b34a88a806548115",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "OfTS",
+ "code_hash": "081dc83996ffbb5854c272909689e338",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: angle\n\nResponse variable: rt,response\nLikelihood: approx_differentiable\nObservations: 1000\n\nParameters:\n\nv:\n Prior: Uniform(lower: -3.0, upper: 3.0)\n Explicit bounds: (-3.0, 3.0)\n\na:\n Prior: Uniform(lower: 0.3, upper: 3.0)\n Explicit bounds: (0.3, 3.0)\n\nz:\n Prior: Uniform(lower: 0.1, upper: 0.9)\n Explicit bounds: (0.1, 0.9)\n\nt:\n Prior: Uniform(lower: 0.001, upper: 2.0)\n Explicit bounds: (0.001, 2.0)\n\ntheta:\n Prior: Uniform(lower: -0.1, upper: 1.3)\n Explicit bounds: (-0.1, 1.3)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "Plbk",
+ "code_hash": "b70f66e871dcca6defb50ab00885a226",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "HuZB",
+ "code_hash": "2c17433d975e01efa6a1278e720c97e3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "'approx_differentiable' "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Ynfw",
+ "code_hash": "795040c7e96b67a9d6f4548a6e0408ff",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "\r 0%| | 0/1000 [00:00, ?it/s]\rwarmup: 0%| | 1/1000 [00:01<28:51, 1.73s/it, 1 steps of size 2.34e+00. acc. prob=0.00]\rwarmup: 1%|\u258b | 8/1000 [00:01<02:54, 5.67it/s, 127 steps of size 1.23e-02. acc. prob=0.53]\rwarmup: 1%|\u2589 | 12/1000 [00:02<02:01, 8.16it/s, 127 steps of size 7.04e-03. acc. prob=0.61]\rwarmup: 2%|\u2588\u258f | 15/1000 [00:02<01:35, 10.30it/s, 11 steps of size 1.01e-02. acc. prob=0.65]\rwarmup: 2%|\u2588\u258b | 21/1000 [00:02<00:59, 16.54it/s, 31 steps of size 3.17e-02. acc. prob=0.71]\rwarmup: 3%|\u2588\u2588\u258f | 27/1000 [00:02<00:43, 22.46it/s, 47 steps of size 1.89e-02. acc. prob=0.72]\rwarmup: 3%|\u2588\u2588\u258d | 31/1000 [00:02<00:39, 24.25it/s, 111 steps of size 7.74e-03. acc. prob=0.71]\rwarmup: 4%|\u2588\u2588\u2588 | 38/1000 [00:02<00:29, 32.28it/s, 31 steps of size 1.94e-02. acc. prob=0.74]\rwarmup: 4%|\u2588\u2588\u2588\u258d | 44/1000 [00:02<00:25, 37.32it/s, 15 steps of size 1.78e-02. acc. prob=0.74]\rwarmup: 5%|\u2588\u2588\u2588\u258a | 49/1000 [00:02<00:26, 35.86it/s, 31 steps of size 2.32e-02. acc. prob=0.75]\rwarmup: 6%|\u2588\u2588\u2588\u2588\u258b | 59/1000 [00:03<00:19, 48.51it/s, 31 steps of size 1.45e-02. acc. prob=0.75]\rwarmup: 6%|\u2588\u2588\u2588\u2588\u2588\u258f | 65/1000 [00:03<00:21, 43.27it/s, 63 steps of size 1.62e-02. acc. prob=0.75]\rwarmup: 7%|\u2588\u2588\u2588\u2588\u2588\u258a | 73/1000 [00:03<00:19, 46.79it/s, 47 steps of size 1.84e-02. acc. prob=0.76]\rwarmup: 8%|\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 82/1000 [00:03<00:16, 56.02it/s, 15 steps of size 1.75e-02. acc. prob=0.76]\rwarmup: 9%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 90/1000 [00:03<00:15, 58.15it/s, 31 steps of size 3.05e-02. acc. prob=0.77]\rwarmup: 10%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 97/1000 [00:03<00:15, 57.47it/s, 31 steps of size 1.23e-02. acc. prob=0.76]\rwarmup: 10%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 104/1000 [00:03<00:18, 47.66it/s, 3 steps of size 1.08e-01. 
acc. prob=0.77]\rwarmup: 11%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 110/1000 [00:04<00:18, 47.88it/s, 63 steps of size 1.18e-01. acc. prob=0.77]\rwarmup: 12%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 116/1000 [00:04<00:17, 49.18it/s, 15 steps of size 8.28e-01. acc. prob=0.78]\rwarmup: 12%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 124/1000 [00:04<00:15, 56.21it/s, 31 steps of size 1.94e-01. acc. prob=0.77]\rwarmup: 13%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 130/1000 [00:04<00:15, 56.73it/s, 3 steps of size 1.38e-01. acc. prob=0.77]\rwarmup: 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 138/1000 [00:04<00:14, 60.67it/s, 15 steps of size 4.08e-01. acc. prob=0.78]\rwarmup: 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 145/1000 [00:04<00:14, 59.56it/s, 23 steps of size 7.40e-01. acc. prob=0.78]\rwarmup: 15%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 154/1000 [00:04<00:14, 58.01it/s, 63 steps of size 7.13e-02. acc. prob=0.77]\rwarmup: 16%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 161/1000 [00:04<00:14, 58.06it/s, 31 steps of size 3.32e-01. acc. prob=0.78]\rwarmup: 17%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 167/1000 [00:05<00:18, 45.92it/s, 15 steps of size 3.92e-01. acc. prob=0.78]\rwarmup: 18%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 179/1000 [00:05<00:13, 60.91it/s, 15 steps of size 3.02e-01. acc. prob=0.78]\rwarmup: 19%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 188/1000 [00:05<00:12, 67.14it/s, 7 steps of size 1.11e-01. acc. prob=0.78]\rwarmup: 20%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 196/1000 [00:05<00:13, 59.31it/s, 3 steps of size 3.47e-01. acc. 
prob=0.78]\rwarmup: 20%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 203/1000 [00:05<00:14, 54.71it/s, 11 steps of size 1.80e-01. acc. prob=0.78]\rwarmup: 21%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 212/1000 [00:05<00:12, 60.79it/s, 15 steps of size 3.81e-01. acc. prob=0.78]\rwarmup: 22%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 219/1000 [00:05<00:12, 62.65it/s, 31 steps of size 3.05e-01. acc. prob=0.78]\rwarmup: 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 226/1000 [00:06<00:12, 62.45it/s, 7 steps of size 3.03e-01. acc. prob=0.78]\rwarmup: 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 233/1000 [00:06<00:12, 62.66it/s, 7 steps of size 4.33e-01. acc. prob=0.78]\rwarmup: 24%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 240/1000 [00:06<00:11, 64.53it/s, 11 steps of size 3.86e-01. acc. prob=0.78]\rwarmup: 25%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 248/1000 [00:06<00:11, 66.31it/s, 15 steps of size 2.01e-01. acc. prob=0.78]\rwarmup: 26%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 255/1000 [00:06<00:11, 62.50it/s, 47 steps of size 1.57e-01. acc. prob=0.78]\rwarmup: 26%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 262/1000 [00:06<00:12, 61.33it/s, 15 steps of size 3.91e-01. acc. 
prob=0.78]\rwarmup: 27%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 269/1000 [00:06<00:12, 60.17it/s, 7 steps of size 3.90e-01. acc. prob=0.78]\rwarmup: 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 278/1000 [00:06<00:11, 64.86it/s, 31 steps of size 4.57e-01. acc. prob=0.78]\rwarmup: 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 285/1000 [00:06<00:10, 65.61it/s, 3 steps of size 1.78e-01. acc. prob=0.78]\rwarmup: 29%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 292/1000 [00:07<00:12, 56.63it/s, 7 steps of size 4.15e-01. acc. prob=0.78]\rwarmup: 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 298/1000 [00:07<00:12, 56.91it/s, 7 steps of size 1.63e-01. acc. prob=0.78]\rwarmup: 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 304/1000 [00:07<00:12, 56.30it/s, 15 steps of size 3.17e-01. acc. prob=0.78]\rwarmup: 31%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 313/1000 [00:07<00:10, 63.08it/s, 31 steps of size 1.36e-01. acc. prob=0.78]\rwarmup: 32%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 321/1000 [00:07<00:10, 64.12it/s, 31 steps of size 4.73e-01. acc. 
prob=0.78]\rwarmup: 33%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 328/1000 [00:07<00:12, 54.86it/s, 7 steps of size 5.91e-01. acc. prob=0.78]\rwarmup: 34%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 337/1000 [00:07<00:11, 58.55it/s, 31 steps of size 1.87e-01. acc. prob=0.78]\rwarmup: 34%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 344/1000 [00:07<00:11, 55.37it/s, 7 steps of size 3.29e-01. acc. prob=0.78]\rwarmup: 35%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 351/1000 [00:08<00:11, 57.46it/s, 7 steps of size 3.84e-01. acc. prob=0.78]\rwarmup: 36%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 358/1000 [00:08<00:10, 58.68it/s, 15 steps of size 3.92e-01. acc. prob=0.78]\rwarmup: 36%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 364/1000 [00:08<00:13, 48.15it/s, 15 steps of size 3.09e-01. acc. prob=0.78]\rwarmup: 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 372/1000 [00:08<00:11, 54.30it/s, 15 steps of size 3.07e-01. acc. 
prob=0.78]\rwarmup: 38%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 382/1000 [00:08<00:09, 62.67it/s, 15 steps of size 3.66e-01. acc. prob=0.78]\rwarmup: 39%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 389/1000 [00:08<00:10, 58.61it/s, 7 steps of size 4.49e-01. acc. prob=0.79]\rwarmup: 40%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 396/1000 [00:08<00:10, 59.12it/s, 15 steps of size 3.60e-01. acc. prob=0.79]\rwarmup: 40%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 403/1000 [00:09<00:10, 57.53it/s, 31 steps of size 2.10e-01. acc. prob=0.78]\rwarmup: 41%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 410/1000 [00:09<00:10, 56.41it/s, 31 steps of size 2.69e-01. acc. prob=0.79]\rwarmup: 42%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 416/1000 [00:09<00:10, 54.72it/s, 15 steps of size 3.04e-01. acc. prob=0.79]\rwarmup: 42%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 422/1000 [00:09<00:10, 55.50it/s, 15 steps of size 2.49e-01. acc. 
prob=0.79]\rwarmup: 43%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 432/1000 [00:09<00:08, 66.71it/s, 7 steps of size 3.39e-01. acc. prob=0.79]\rwarmup: 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 441/1000 [00:09<00:08, 69.54it/s, 15 steps of size 4.03e-01. acc. prob=0.79]\rwarmup: 45%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 449/1000 [00:09<00:09, 61.16it/s, 7 steps of size 4.00e-01. acc. prob=0.79]\rwarmup: 46%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 459/1000 [00:09<00:07, 70.66it/s, 7 steps of size 5.33e-01. acc. prob=0.79]\rwarmup: 47%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 467/1000 [00:10<00:14, 37.29it/s, 127 steps of size 1.15e-01. acc. prob=0.78]\rwarmup: 47%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 473/1000 [00:10<00:15, 34.28it/s, 31 steps of size 1.95e-01. acc. 
prob=0.78]\rwarmup: 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 478/1000 [00:10<00:15, 33.32it/s, 31 steps of size 2.17e-01. acc. prob=0.78]\rwarmup: 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 483/1000 [00:10<00:15, 33.99it/s, 31 steps of size 1.42e-01. acc. prob=0.78]\rwarmup: 49%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 490/1000 [00:10<00:12, 40.07it/s, 7 steps of size 8.61e-02. acc. prob=0.78]\rwarmup: 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 495/1000 [00:11<00:12, 40.98it/s, 11 steps of size 9.47e-02. acc. prob=0.78]\rwarmup: 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 500/1000 [00:11<00:12, 38.93it/s, 15 steps of size 2.44e-01. acc. prob=0.78]\rsample: 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 505/1000 [00:11<00:11, 41.30it/s, 19 steps of size 2.44e-01. acc. 
prob=0.91]\rsample: 51%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 511/1000 [00:11<00:10, 45.55it/s, 7 steps of size 2.44e-01. acc. prob=0.94]\rsample: 52%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 516/1000 [00:11<00:11, 41.83it/s, 15 steps of size 2.44e-01. acc. prob=0.96]\rsample: 52%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 523/1000 [00:11<00:10, 46.26it/s, 19 steps of size 2.44e-01. acc. prob=0.96]\rsample: 53%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 528/1000 [00:11<00:10, 42.96it/s, 31 steps of size 2.44e-01. acc. prob=0.96]\rsample: 53%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 533/1000 [00:11<00:11, 39.37it/s, 15 steps of size 2.44e-01. acc. 
prob=0.96]\rsample: 54%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 538/1000 [00:12<00:11, 41.12it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 54%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 543/1000 [00:12<00:11, 40.19it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 55%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 548/1000 [00:12<00:11, 39.98it/s, 15 steps of size 2.44e-01. acc. prob=0.96]\rsample: 56%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 556/1000 [00:12<00:09, 46.69it/s, 23 steps of size 2.44e-01. acc. prob=0.95]\rsample: 56%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 563/1000 [00:12<00:08, 50.79it/s, 15 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 569/1000 [00:12<00:08, 50.77it/s, 7 steps of size 2.44e-01. acc. prob=0.94]\rsample: 57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 575/1000 [00:12<00:08, 48.69it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 58%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 580/1000 [00:12<00:08, 48.53it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 58%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 585/1000 [00:13<00:10, 41.13it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 590/1000 [00:13<00:10, 39.48it/s, 15 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 597/1000 [00:13<00:09, 44.25it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 604/1000 [00:13<00:07, 50.38it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 61%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 612/1000 [00:13<00:07, 54.09it/s, 23 steps of size 2.44e-01. acc. prob=0.95]\rsample: 62%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 618/1000 [00:13<00:08, 44.52it/s, 23 steps of size 2.44e-01. acc. prob=0.95]\rsample: 62%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 623/1000 [00:13<00:08, 44.58it/s, 15 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 630/1000 [00:14<00:07, 48.56it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 636/1000 [00:14<00:08, 43.61it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 641/1000 [00:14<00:08, 44.41it/s, 23 steps of size 2.44e-01. acc. prob=0.95]\rsample: 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 647/1000 [00:14<00:07, 48.16it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 654/1000 [00:14<00:06, 52.40it/s, 15 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 66%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 660/1000 [00:14<00:06, 49.93it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 67%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 666/1000 [00:14<00:08, 40.19it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 67%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 671/1000 [00:14<00:07, 41.72it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 676/1000 [00:15<00:07, 42.85it/s, 23 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 681/1000 [00:15<00:07, 42.35it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 69%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 686/1000 [00:15<00:07, 42.70it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 69%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 692/1000 [00:15<00:06, 46.43it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 697/1000 [00:15<00:06, 46.11it/s, 31 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 703/1000 [00:15<00:06, 48.79it/s, 23 steps of size 2.44e-01. acc. prob=0.95]\rsample: 71%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 709/1000 [00:15<00:05, 50.85it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 715/1000 [00:15<00:05, 50.11it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 721/1000 [00:15<00:05, 52.44it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 727/1000 [00:16<00:05, 52.90it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 733/1000 [00:16<00:05, 48.39it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 74%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 738/1000 [00:16<00:07, 34.70it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 74%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 743/1000 [00:16<00:07, 35.03it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 75%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 748/1000 [00:16<00:06, 36.82it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 75%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 754/1000 [00:16<00:06, 40.82it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 761/1000 [00:16<00:05, 46.23it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 77%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 766/1000 [00:17<00:05, 43.78it/s, 11 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 77%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 771/1000 [00:17<00:05, 43.88it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 78%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 776/1000 [00:17<00:05, 41.41it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 78%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 781/1000 [00:17<00:05, 42.57it/s, 23 steps of size 2.44e-01. acc. prob=0.95]\rsample: 79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 786/1000 [00:17<00:05, 42.01it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 794/1000 [00:17<00:03, 51.77it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 80%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 800/1000 [00:17<00:04, 49.52it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 81%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 806/1000 [00:17<00:03, 49.63it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 81%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 812/1000 [00:18<00:04, 43.93it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 82%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 818/1000 [00:18<00:03, 47.69it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 82%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 825/1000 [00:18<00:03, 50.76it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 83%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 832/1000 [00:18<00:03, 53.18it/s, 19 steps of size 2.44e-01. acc. prob=0.95]\rsample: 84%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 838/1000 [00:18<00:03, 48.71it/s, 15 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 84%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 844/1000 [00:18<00:03, 48.28it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 85%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 850/1000 [00:18<00:02, 50.50it/s, 3 steps of size 2.44e-01. acc. prob=0.95]\rsample: 86%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 856/1000 [00:19<00:03, 46.86it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 86%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 862/1000 [00:19<00:02, 49.48it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 87%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 868/1000 [00:19<00:03, 40.55it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 877/1000 [00:19<00:02, 50.70it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 884/1000 [00:19<00:02, 44.79it/s, 31 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 89%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 889/1000 [00:19<00:02, 43.07it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 89%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 894/1000 [00:19<00:02, 43.83it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 899/1000 [00:19<00:02, 41.84it/s, 27 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 904/1000 [00:20<00:02, 40.50it/s, 11 steps of size 2.44e-01. acc. prob=0.95]\rsample: 91%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 909/1000 [00:20<00:02, 40.48it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 91%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 914/1000 [00:20<00:02, 41.41it/s, 31 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 92%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 919/1000 [00:20<00:01, 42.81it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 926/1000 [00:20<00:01, 45.21it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 932/1000 [00:20<00:01, 46.95it/s, 23 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 94%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 939/1000 [00:20<00:01, 50.58it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 94%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 945/1000 [00:21<00:01, 40.10it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 95%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 950/1000 [00:21<00:01, 40.38it/s, 11 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 955/1000 [00:21<00:01, 34.62it/s, 15 steps of size 2.44e-01. acc. prob=0.95]\rsample: 96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 959/1000 [00:21<00:01, 34.83it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 963/1000 [00:21<00:01, 33.64it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 97%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 968/1000 [00:21<00:00, 37.43it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 97%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 974/1000 [00:21<00:00, 42.28it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 98%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 979/1000 [00:21<00:00, 42.79it/s, 15 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 98%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 984/1000 [00:22<00:00, 38.17it/s, 31 steps of size 2.44e-01. acc. prob=0.95]\rsample: 99%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f| 989/1000 [00:22<00:00, 40.46it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\rsample: 99%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c| 994/1000 [00:22<00:00, 40.26it/s, 7 steps of size 2.44e-01. acc. 
prob=0.95]\rsample: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1000/1000 [00:22<00:00, 44.53it/s, 7 steps of size 2.44e-01. acc. prob=0.95]\n\r 0%| | 0/1000 [00:00, ?it/s]\rwarmup: 1%|\u258b | 8/1000 [00:00<00:36, 26.92it/s, 255 steps of size 3.86e-03. acc. prob=0.49]\rwarmup: 1%|\u258a | 11/1000 [00:00<01:23, 11.78it/s, 31 steps of size 2.25e-02. acc. prob=0.62]\rwarmup: 1%|\u2588 | 14/1000 [00:00<01:08, 14.35it/s, 23 steps of size 1.11e-02. acc. prob=0.64]\rwarmup: 2%|\u2588\u258c | 19/1000 [00:01<00:51, 19.18it/s, 63 steps of size 8.15e-03. acc. prob=0.68]\rwarmup: 2%|\u2588\u2589 | 24/1000 [00:01<00:43, 22.36it/s, 63 steps of size 8.97e-03. acc. prob=0.70]\rwarmup: 3%|\u2588\u2588\u258f | 27/1000 [00:01<00:41, 23.46it/s, 15 steps of size 1.46e-02. acc. prob=0.71]\rwarmup: 3%|\u2588\u2588\u258d | 31/1000 [00:01<00:38, 25.02it/s, 63 steps of size 8.86e-03. acc. prob=0.72]\rwarmup: 4%|\u2588\u2588\u2589 | 37/1000 [00:01<00:31, 30.29it/s, 47 steps of size 1.10e-02. acc. prob=0.73]\rwarmup: 4%|\u2588\u2588\u2588\u258e | 42/1000 [00:01<00:27, 34.31it/s, 15 steps of size 1.97e-02. acc. prob=0.74]\rwarmup: 5%|\u2588\u2588\u2588\u2589 | 50/1000 [00:01<00:23, 40.41it/s, 63 steps of size 1.39e-02. acc. prob=0.74]\rwarmup: 6%|\u2588\u2588\u2588\u2588\u258c | 58/1000 [00:02<00:20, 46.30it/s, 31 steps of size 2.53e-02. acc. prob=0.75]\rwarmup: 6%|\u2588\u2588\u2588\u2588\u2588 | 63/1000 [00:02<00:20, 46.11it/s, 7 steps of size 1.25e-02. acc. 
prob=0.75]\rwarmup: 7%|\u2588\u2588\u2588\u2588\u2588\u258c | 71/1000 [00:02<00:19, 47.00it/s, 63 steps of size 1.07e-02. acc. prob=0.75]\rwarmup: 8%|\u2588\u2588\u2588\u2588\u2588\u2588 | 77/1000 [00:02<00:19, 48.40it/s, 23 steps of size 1.11e-02. acc. prob=0.76]\rwarmup: 8%|\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 83/1000 [00:02<00:18, 50.64it/s, 31 steps of size 1.48e-02. acc. prob=0.76]\rwarmup: 9%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 89/1000 [00:02<00:17, 51.86it/s, 15 steps of size 2.95e-02. acc. prob=0.77]\rwarmup: 10%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 97/1000 [00:02<00:15, 57.43it/s, 23 steps of size 2.81e-02. acc. prob=0.77]\rwarmup: 10%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 103/1000 [00:03<00:27, 32.48it/s, 7 steps of size 2.33e-01. acc. prob=0.77]\rwarmup: 11%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 110/1000 [00:03<00:25, 34.56it/s, 63 steps of size 1.31e-01. acc. prob=0.77]\rwarmup: 12%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 115/1000 [00:03<00:24, 36.83it/s, 3 steps of size 8.23e-02. acc. prob=0.77]\rwarmup: 12%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 120/1000 [00:03<00:25, 33.87it/s, 7 steps of size 1.20e-01. acc. prob=0.77]\rwarmup: 12%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 124/1000 [00:03<00:25, 34.62it/s, 3 steps of size 4.59e-01. acc. prob=0.77]\rwarmup: 13%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 129/1000 [00:03<00:23, 36.79it/s, 31 steps of size 4.21e-01. acc. prob=0.78]\rwarmup: 13%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 134/1000 [00:03<00:25, 33.59it/s, 15 steps of size 4.90e-01. acc. prob=0.78]\rwarmup: 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 141/1000 [00:04<00:22, 38.73it/s, 31 steps of size 3.07e-01. acc. 
prob=0.78]\rwarmup: 15%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 146/1000 [00:04<00:20, 40.97it/s, 15 steps of size 3.92e-01. acc. prob=0.78]\rwarmup: 15%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 154/1000 [00:04<00:17, 48.33it/s, 15 steps of size 3.34e-01. acc. prob=0.77]\rwarmup: 16%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 160/1000 [00:04<00:19, 44.05it/s, 7 steps of size 5.52e-01. acc. prob=0.78]\rwarmup: 16%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 165/1000 [00:04<00:18, 44.66it/s, 15 steps of size 4.62e-01. acc. prob=0.78]\rwarmup: 17%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 170/1000 [00:04<00:19, 42.84it/s, 15 steps of size 2.44e-01. acc. prob=0.78]\rwarmup: 18%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 175/1000 [00:04<00:22, 36.43it/s, 31 steps of size 3.63e-01. acc. prob=0.78]\rwarmup: 18%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 182/1000 [00:05<00:18, 43.56it/s, 19 steps of size 4.40e-01. acc. prob=0.78]\rwarmup: 19%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 187/1000 [00:05<00:18, 44.58it/s, 15 steps of size 2.77e-01. acc. prob=0.78]\rwarmup: 19%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 192/1000 [00:05<00:18, 43.56it/s, 63 steps of size 2.07e-01. acc. prob=0.78]\rwarmup: 20%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 199/1000 [00:05<00:16, 49.53it/s, 15 steps of size 4.89e-01. acc. prob=0.78]\rwarmup: 21%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 207/1000 [00:05<00:14, 54.49it/s, 31 steps of size 2.78e-01. acc. 
prob=0.78]\rwarmup: 22%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 215/1000 [00:05<00:13, 60.15it/s, 7 steps of size 5.18e-01. acc. prob=0.78]\rwarmup: 22%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 222/1000 [00:05<00:14, 53.06it/s, 15 steps of size 2.82e-01. acc. prob=0.78]\rwarmup: 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 228/1000 [00:05<00:14, 54.02it/s, 3 steps of size 2.42e-01. acc. prob=0.78]\rwarmup: 24%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 237/1000 [00:06<00:12, 60.95it/s, 15 steps of size 2.90e-01. acc. prob=0.78]\rwarmup: 24%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 244/1000 [00:06<00:13, 56.16it/s, 7 steps of size 7.60e-01. acc. prob=0.78]\rwarmup: 25%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 251/1000 [00:06<00:13, 56.91it/s, 23 steps of size 3.05e+00. acc. prob=0.78]\rwarmup: 26%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 258/1000 [00:06<00:12, 59.08it/s, 7 steps of size 8.50e-01. acc. prob=0.78]\rwarmup: 26%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 265/1000 [00:06<00:17, 41.87it/s, 11 steps of size 5.19e-01. acc. prob=0.78]\rwarmup: 27%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 270/1000 [00:06<00:19, 37.52it/s, 15 steps of size 4.37e-01. acc. 
prob=0.78]\rwarmup: 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 275/1000 [00:06<00:18, 39.71it/s, 15 steps of size 3.34e-01. acc. prob=0.78]\rwarmup: 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 280/1000 [00:07<00:19, 36.30it/s, 31 steps of size 3.11e-01. acc. prob=0.78]\rwarmup: 29%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 286/1000 [00:07<00:17, 39.77it/s, 15 steps of size 4.15e-01. acc. prob=0.78]\rwarmup: 29%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 292/1000 [00:07<00:16, 43.60it/s, 7 steps of size 7.09e-01. acc. prob=0.78]\rwarmup: 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 297/1000 [00:07<00:15, 44.76it/s, 7 steps of size 9.97e-02. acc. prob=0.78]\rwarmup: 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 302/1000 [00:07<00:16, 43.32it/s, 7 steps of size 9.80e-02. acc. prob=0.78]\rwarmup: 31%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 307/1000 [00:07<00:16, 43.14it/s, 15 steps of size 2.74e-01. acc. prob=0.78]\rwarmup: 31%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 312/1000 [00:07<00:17, 39.69it/s, 15 steps of size 2.93e-01. acc. 
prob=0.78]\rwarmup: 32%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 320/1000 [00:07<00:13, 48.98it/s, 7 steps of size 4.98e-01. acc. prob=0.78]\rwarmup: 33%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 326/1000 [00:08<00:13, 51.28it/s, 15 steps of size 5.10e-01. acc. prob=0.78]\rwarmup: 33%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 332/1000 [00:08<00:12, 53.39it/s, 3 steps of size 2.77e-01. acc. prob=0.78]\rwarmup: 34%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 340/1000 [00:08<00:10, 60.47it/s, 7 steps of size 1.53e-01. acc. prob=0.78]\rwarmup: 35%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 347/1000 [00:08<00:11, 58.37it/s, 15 steps of size 2.40e-01. acc. prob=0.78]\rwarmup: 35%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 353/1000 [00:08<00:13, 47.87it/s, 15 steps of size 2.87e-01. acc. prob=0.78]\rwarmup: 36%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 359/1000 [00:08<00:12, 49.57it/s, 23 steps of size 6.66e-01. acc. 
prob=0.78]\rwarmup: 36%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 365/1000 [00:08<00:12, 50.65it/s, 7 steps of size 1.89e-01. acc. prob=0.78]\rwarmup: 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 371/1000 [00:08<00:12, 50.40it/s, 15 steps of size 4.12e-01. acc. prob=0.78]\rwarmup: 38%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 377/1000 [00:09<00:12, 50.52it/s, 15 steps of size 3.77e-01. acc. prob=0.78]\rwarmup: 39%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 386/1000 [00:09<00:10, 60.50it/s, 7 steps of size 3.54e-01. acc. prob=0.78]\rwarmup: 39%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 394/1000 [00:09<00:09, 62.91it/s, 15 steps of size 4.69e-01. acc. prob=0.79]\rwarmup: 40%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 401/1000 [00:09<00:09, 60.84it/s, 7 steps of size 5.11e-01. acc. prob=0.79]\rwarmup: 41%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 411/1000 [00:09<00:08, 70.08it/s, 15 steps of size 3.28e-01. acc. 
prob=0.79]\rwarmup: 42%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 419/1000 [00:09<00:08, 69.44it/s, 15 steps of size 2.84e-01. acc. prob=0.79]\rwarmup: 43%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 428/1000 [00:09<00:07, 74.36it/s, 7 steps of size 2.93e-01. acc. prob=0.79]\rwarmup: 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 437/1000 [00:09<00:07, 77.88it/s, 3 steps of size 3.90e-01. acc. prob=0.79]\rwarmup: 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 445/1000 [00:09<00:07, 73.98it/s, 15 steps of size 4.70e-01. acc. prob=0.79]\rwarmup: 45%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 453/1000 [00:10<00:07, 75.34it/s, 7 steps of size 4.01e-01. acc. prob=0.78]\rwarmup: 46%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 461/1000 [00:10<00:10, 50.63it/s, 15 steps of size 3.73e-01. acc. 
prob=0.79]\rwarmup: 47%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 468/1000 [00:10<00:11, 45.04it/s, 15 steps of size 4.85e-01. acc. prob=0.79]\rwarmup: 47%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 474/1000 [00:10<00:13, 39.90it/s, 3 steps of size 2.98e-01. acc. prob=0.79]\rwarmup: 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 480/1000 [00:10<00:12, 42.46it/s, 15 steps of size 5.77e-01. acc. prob=0.79]\rwarmup: 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 485/1000 [00:10<00:12, 40.34it/s, 31 steps of size 4.58e-01. acc. prob=0.79]\rwarmup: 49%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 490/1000 [00:11<00:13, 38.51it/s, 47 steps of size 1.09e-01. acc. prob=0.78]\rwarmup: 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 495/1000 [00:11<00:12, 39.25it/s, 3 steps of size 9.77e-02. acc. 
prob=0.78]\rwarmup: 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 500/1000 [00:11<00:15, 33.08it/s, 63 steps of size 2.08e-01. acc. prob=0.79]\rsample: 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 505/1000 [00:11<00:14, 34.17it/s, 31 steps of size 2.08e-01. acc. prob=0.96]\rsample: 51%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 511/1000 [00:11<00:12, 39.59it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 52%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 517/1000 [00:11<00:11, 42.98it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 52%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 522/1000 [00:11<00:11, 40.81it/s, 7 steps of size 2.08e-01. acc. 
prob=0.94]\rsample: 53%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 528/1000 [00:12<00:11, 41.84it/s, 19 steps of size 2.08e-01. acc. prob=0.95]\rsample: 53%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 533/1000 [00:12<00:11, 41.79it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 54%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 538/1000 [00:12<00:10, 43.29it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 54%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 543/1000 [00:12<00:11, 40.23it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 55%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 550/1000 [00:12<00:09, 45.84it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 56%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 555/1000 [00:12<00:10, 43.67it/s, 7 steps of size 2.08e-01. acc. prob=0.96]\rsample: 56%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 562/1000 [00:12<00:08, 48.71it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 567/1000 [00:12<00:09, 46.53it/s, 31 steps of size 2.08e-01. acc. prob=0.96]\rsample: 57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 573/1000 [00:13<00:08, 49.82it/s, 11 steps of size 2.08e-01. acc. prob=0.96]\rsample: 58%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 579/1000 [00:13<00:08, 48.41it/s, 15 steps of size 2.08e-01. acc. 
prob=0.96]\rsample: 58%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 584/1000 [00:13<00:08, 48.76it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 589/1000 [00:13<00:09, 43.32it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 594/1000 [00:13<00:09, 44.44it/s, 7 steps of size 2.08e-01. acc. prob=0.96]\rsample: 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 599/1000 [00:13<00:09, 40.84it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 604/1000 [00:13<00:10, 38.93it/s, 15 steps of size 2.08e-01. acc. 
prob=0.96]\rsample: 61%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 608/1000 [00:13<00:10, 39.01it/s, 23 steps of size 2.08e-01. acc. prob=0.96]\rsample: 61%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 613/1000 [00:14<00:09, 39.35it/s, 23 steps of size 2.08e-01. acc. prob=0.96]\rsample: 62%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 617/1000 [00:14<00:11, 33.93it/s, 31 steps of size 2.08e-01. acc. prob=0.96]\rsample: 62%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 621/1000 [00:14<00:11, 32.76it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 62%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 625/1000 [00:14<00:11, 33.26it/s, 15 steps of size 2.08e-01. acc. 
prob=0.96]\rsample: 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 629/1000 [00:14<00:10, 34.68it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 633/1000 [00:14<00:10, 35.71it/s, 31 steps of size 2.08e-01. acc. prob=0.96]\rsample: 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 638/1000 [00:14<00:09, 37.35it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 644/1000 [00:14<00:08, 41.81it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 649/1000 [00:14<00:07, 43.99it/s, 23 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 654/1000 [00:15<00:08, 43.12it/s, 15 steps of size 2.08e-01. acc. prob=0.96]\rsample: 66%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 659/1000 [00:15<00:08, 41.48it/s, 7 steps of size 2.08e-01. acc. prob=0.96]\rsample: 66%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 664/1000 [00:15<00:08, 40.92it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 67%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 669/1000 [00:15<00:08, 39.82it/s, 23 steps of size 2.08e-01. acc. 
prob=0.96]\rsample: 67%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 674/1000 [00:15<00:07, 42.19it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 680/1000 [00:15<00:07, 42.88it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 685/1000 [00:15<00:07, 43.32it/s, 7 steps of size 2.08e-01. acc. prob=0.95]\rsample: 69%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 690/1000 [00:15<00:07, 42.78it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 695/1000 [00:16<00:07, 40.12it/s, 23 steps of size 2.08e-01. acc. prob=0.95]\rsample: 70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 700/1000 [00:16<00:07, 41.64it/s, 7 steps of size 2.08e-01. acc. prob=0.95]\rsample: 70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 705/1000 [00:16<00:06, 43.45it/s, 11 steps of size 2.08e-01. acc. prob=0.95]\rsample: 71%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 712/1000 [00:16<00:05, 49.46it/s, 7 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 718/1000 [00:16<00:05, 48.68it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 723/1000 [00:16<00:06, 41.72it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 728/1000 [00:16<00:06, 39.40it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 733/1000 [00:16<00:06, 39.18it/s, 7 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 74%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 739/1000 [00:17<00:06, 43.10it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 74%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 744/1000 [00:17<00:06, 40.23it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 75%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 749/1000 [00:17<00:06, 40.82it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 75%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 754/1000 [00:17<00:05, 41.37it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 759/1000 [00:17<00:05, 42.49it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 764/1000 [00:17<00:05, 39.37it/s, 7 steps of size 2.08e-01. acc. prob=0.95]\rsample: 77%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 772/1000 [00:17<00:04, 46.19it/s, 23 steps of size 2.08e-01. acc. prob=0.95]\rsample: 78%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 777/1000 [00:18<00:05, 42.31it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 78%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 782/1000 [00:18<00:05, 40.45it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 789/1000 [00:18<00:04, 44.84it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 80%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 795/1000 [00:18<00:04, 47.10it/s, 11 steps of size 2.08e-01. acc. prob=0.95]\rsample: 80%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 800/1000 [00:18<00:04, 45.79it/s, 7 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 81%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 806/1000 [00:18<00:04, 46.63it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 81%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 811/1000 [00:18<00:04, 44.43it/s, 7 steps of size 2.08e-01. acc. prob=0.95]\rsample: 82%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 817/1000 [00:18<00:03, 47.52it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 82%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 825/1000 [00:18<00:03, 53.39it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 83%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 831/1000 [00:19<00:03, 49.99it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 84%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 837/1000 [00:19<00:03, 46.77it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 84%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 842/1000 [00:19<00:03, 42.08it/s, 27 steps of size 2.08e-01. acc. prob=0.95]\rsample: 85%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 847/1000 [00:19<00:03, 40.88it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 85%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 852/1000 [00:19<00:03, 39.82it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 86%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 859/1000 [00:19<00:03, 45.07it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 87%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 866/1000 [00:19<00:02, 48.03it/s, 23 steps of size 2.08e-01. acc. prob=0.95]\rsample: 87%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 872/1000 [00:20<00:02, 50.28it/s, 7 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 878/1000 [00:20<00:02, 47.36it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 883/1000 [00:20<00:02, 43.57it/s, 7 steps of size 2.08e-01. acc. prob=0.95]\rsample: 89%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 888/1000 [00:20<00:02, 38.42it/s, 23 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 89%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 892/1000 [00:20<00:02, 38.67it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 897/1000 [00:20<00:02, 40.43it/s, 11 steps of size 2.08e-01. acc. prob=0.95]\rsample: 90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 902/1000 [00:20<00:02, 39.58it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 91%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 907/1000 [00:20<00:02, 38.14it/s, 23 steps of size 2.08e-01. acc. prob=0.95]\rsample: 91%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 911/1000 [00:21<00:02, 37.82it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 92%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 917/1000 [00:21<00:01, 42.51it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 92%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 923/1000 [00:21<00:01, 45.32it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 928/1000 [00:21<00:01, 41.53it/s, 23 steps of size 2.08e-01. acc. prob=0.95]\rsample: 93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 933/1000 [00:21<00:01, 38.65it/s, 7 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 94%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 939/1000 [00:21<00:01, 43.45it/s, 3 steps of size 2.08e-01. acc. prob=0.95]\rsample: 94%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 945/1000 [00:21<00:01, 45.72it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 95%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 950/1000 [00:21<00:01, 46.59it/s, 19 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 956/1000 [00:22<00:00, 50.13it/s, 7 steps of size 2.08e-01. acc. prob=0.95]\rsample: 96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 962/1000 [00:22<00:00, 46.91it/s, 31 steps of size 2.08e-01. acc. prob=0.95]\rsample: 97%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 967/1000 [00:22<00:00, 45.15it/s, 3 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 97%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 972/1000 [00:22<00:00, 43.24it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 98%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 977/1000 [00:22<00:00, 44.47it/s, 3 steps of size 2.08e-01. acc. prob=0.95]\rsample: 98%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 984/1000 [00:22<00:00, 50.15it/s, 15 steps of size 2.08e-01. acc. 
prob=0.95]\rsample: 99%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e| 991/1000 [00:22<00:00, 53.88it/s, 23 steps of size 2.08e-01. acc. prob=0.95]\rsample: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a| 997/1000 [00:22<00:00, 50.80it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\rsample: 100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1000/1000 [00:22<00:00, 43.53it/s, 15 steps of size 2.08e-01. acc. prob=0.95]\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "LqFA",
+ "code_hash": "0cc388b62e96f764cdedb8a4d8e9940e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Lpqv",
+ "code_hash": "7a8fb0d52f13319a85e58367f4c31e6c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/json": "{\"v\": \"text/plain+float:0.5\", \"a\": \"text/plain+float:1.5\", \"z\": \"text/plain+float:0.5\", \"t\": \"text/plain+float:0.5\"}"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "upgv",
+ "code_hash": "9ef55deb01af081403927141be6f467b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "pCao",
+ "code_hash": "844342adc4b510dcc33411101d8edfbf",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "wEIy",
+ "code_hash": "df1edfd63a9ca2f98091914db349dbf1",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n \n \n
\n \n \n \n posterior \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 12kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 7 ... 493 494 495 496 497 498 499\nData variables:\n v (chain, draw) float64 8kB 0.5391 0.5541 0.5972 ... 0.4466 0.4466\nAttributes:\n created_at: 2026-01-08T04:57:00.912012+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 11.285976886749268\n tuning_steps: 500\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 \n \n
\n \n \n \n \n sample_stats \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 134kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 ... 495 496 497 498 499\nData variables: (12/18)\n divergences (chain, draw) int64 8kB 0 0 0 0 0 0 0 ... 0 0 0 0 0 0\n step_size_bar (chain, draw) float64 8kB 1.411 1.411 ... 1.397 1.397\n smallest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n acceptance_rate (chain, draw) float64 8kB 0.9887 0.8064 ... 0.4535\n perf_counter_diff (chain, draw) float64 8kB 0.0003082 ... 0.0002424\n n_steps (chain, draw) float64 8kB 3.0 1.0 1.0 ... 3.0 1.0 1.0\n ... ...\n diverging (chain, draw) bool 1kB False False ... False False\n energy_error (chain, draw) float64 8kB 0.03457 0.2151 ... 0.0\n largest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n tree_depth (chain, draw) int64 8kB 2 1 1 2 1 2 1 ... 1 1 2 2 1 1\n max_energy_error (chain, draw) float64 8kB -0.255 0.2151 ... 0.7908\n energy (chain, draw) float64 8kB 1.025e+03 ... 1.025e+03\nAttributes:\n created_at: 2026-01-08T04:57:00.923531+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 11.285976886749268\n tuning_steps: 500\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (18)
divergences
(chain, draw)
int64
0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0
array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n...\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) step_size_bar
(chain, draw)
float64
1.411 1.411 1.411 ... 1.397 1.397
array([[1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n 1.41083455, 1.41083455, 1.41083455, 1.41083455, 1.41083455,\n...\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 
1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272,\n 1.39712272, 1.39712272, 1.39712272, 1.39712272, 1.39712272]]) smallest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) acceptance_rate
(chain, draw)
float64
0.9887 0.8064 0.3978 ... 1.0 0.4535
array([[0.98867279, 0.8064481 , 0.3978153 , 1. , 1. ,\n 1. , 1. , 0.96176404, 0.69649324, 0.69232848,\n 0.60761544, 0.836336 , 0.85757716, 0.95855766, 0.82235842,\n 0.94551875, 1. , 0.97592472, 0.84170957, 1. ,\n 1. , 0.82388629, 1. , 0.99367711, 1. ,\n 0.99127057, 0.87728733, 0.99713153, 0.63302488, 1. ,\n 0.96971915, 1. , 1. , 1. , 0.96269833,\n 0.91129079, 1. , 1. , 0.86792342, 1. ,\n 0.96796978, 0.9915102 , 0.398191 , 1. , 1. ,\n 0.63112244, 1. , 1. , 0.6550516 , 0.87666524,\n 0.98574897, 1. , 0.9759836 , 0.8020472 , 0.56202566,\n 1. , 1. , 0.82861131, 0.84584907, 1. ,\n 0.92699986, 0.80602321, 1. , 0.99421474, 1. ,\n 0.78776046, 1. , 0.93049912, 0.79158332, 1. ,\n 0.97617492, 0.91723846, 0.76648838, 0.60657884, 1. ,\n 1. , 0.68971865, 0.61833792, 1. , 0.91433669,\n 1. , 1. , 0.80675325, 0.5569258 , 1. ,\n 0.72924249, 0.83534894, 0.99494659, 0.92238806, 1. ,\n 1. , 0.83426108, 0.7607537 , 0.8358358 , 0.85968936,\n 0.99090495, 0.99847075, 0.87863553, 1. , 0.5913679 ,\n...\n 1. , 1. , 0.28388574, 0.65032972, 1. ,\n 0.50226256, 0.92292681, 0.66577167, 0.98862496, 0.78418066,\n 1. , 0.63584694, 0.97266162, 1. , 0.98444179,\n 0.97302937, 0.86171995, 0.94570186, 1. , 0.91703306,\n 0.99091395, 0.99238016, 1. , 0.99557678, 0.24608386,\n 1. , 0.77169739, 1. , 0.20351557, 0.97188174,\n 1. , 0.98935872, 0.55089353, 0.97920037, 0.79996359,\n 0.96420262, 1. , 0.99713476, 0.88798204, 0.9519531 ,\n 1. , 0.92314743, 0.97086933, 0.83503536, 0.93587777,\n 1. , 0.47626685, 1. , 0.45496345, 1. ,\n 1. , 0.93892521, 0.67246789, 1. , 0.91186647,\n 1. , 1. , 0.45191415, 1. , 0.97014838,\n 0.97059632, 1. , 1. , 0.51928584, 0.95921809,\n 0.81940003, 1. , 1. , 0.11529973, 0.99828934,\n 0.83983526, 0.96942389, 1. , 0.99927881, 0.99340903,\n 0.71501292, 0.91773457, 1. , 0.41328162, 0.79384402,\n 1. , 0.91863362, 0.9948718 , 0.30440526, 0.98028957,\n 0.40127788, 0.89775931, 1. , 0.9988266 , 0.99211245,\n 0.99879447, 0.93523406, 0.91287651, 0.93051174, 0.83350702,\n 0.79280925, 1. 
, 0.91268698, 1. , 0.4534825 ]]) perf_counter_diff
(chain, draw)
float64
0.0003082 0.0001804 ... 0.0002424
array([[0.0003082 , 0.00018039, 0.00021651, 0.00029714, 0.00015186,\n 0.00030623, 0.00014971, 0.00015767, 0.00047362, 0.00048123,\n 0.00043365, 0.00032022, 0.00029022, 0.00028134, 0.00028559,\n 0.00028583, 0.00014929, 0.00030061, 0.00014963, 0.0002944 ,\n 0.00016593, 0.00028834, 0.00014919, 0.00030752, 0.00014671,\n 0.00032628, 0.00034565, 0.00015482, 0.00037731, 0.00015541,\n 0.00023896, 0.0003669 , 0.00015858, 0.00015001, 0.00019463,\n 0.00015063, 0.00014676, 0.00027587, 0.00028484, 0.0001486 ,\n 0.00033426, 0.0003044 , 0.0002773 , 0.00016543, 0.00018731,\n 0.00032126, 0.00017137, 0.00035234, 0.00035463, 0.00029327,\n 0.00028706, 0.00020785, 0.00018892, 0.00049589, 0.00025981,\n 0.00042579, 0.00015235, 0.00014708, 0.0001484 , 0.00028251,\n 0.00027189, 0.00014642, 0.00029222, 0.00014655, 0.00028216,\n 0.00028505, 0.00027132, 0.00030236, 0.00026742, 0.00014495,\n 0.0003364 , 0.00014834, 0.00027841, 0.00016195, 0.00042122,\n 0.00026872, 0.00052015, 0.0002525 , 0.00024197, 0.00048082,\n 0.00044641, 0.00054642, 0.00024851, 0.00024202, 0.00045062,\n 0.00049246, 0.00053332, 0.00052543, 0.00050545, 0.00051209,\n 0.00046289, 0.00035263, 0.00015148, 0.00014855, 0.00016998,\n 0.00036941, 0.00048764, 0.00046801, 0.00020799, 0.0002941 ,\n...\n 0.0002528 , 0.00027239, 0.000254 , 0.00025321, 0.00025634,\n 0.00026036, 0.00054136, 0.00056745, 0.00055701, 0.00050281,\n 0.00026968, 0.00053439, 0.00061452, 0.00026184, 0.00054477,\n 0.00029567, 0.00052657, 0.00051775, 0.00025857, 0.00028299,\n 0.00060027, 0.00018744, 0.0003151 , 0.00030529, 0.0001552 ,\n 0.00015614, 0.0002943 , 0.00029772, 0.00015119, 0.00029642,\n 0.00028164, 0.00029295, 0.00028069, 0.00015639, 0.00032116,\n 0.00031191, 0.00016194, 0.00037503, 0.00015665, 0.0001953 ,\n 0.00015893, 0.00017799, 0.0002631 , 0.00016812, 0.0003066 ,\n 0.0003852 , 0.0003043 , 0.00029842, 0.00015383, 0.00015302,\n 0.00015352, 0.00033237, 0.00029614, 0.00015329, 0.00030398,\n 0.0001681 , 0.00029528, 0.00031884, 0.00022385, 0.00048329,\n 
0.00025902, 0.0002744 , 0.00018311, 0.00015973, 0.00032263,\n 0.00030155, 0.00015505, 0.00017762, 0.0001592 , 0.00019922,\n 0.00015162, 0.00033148, 0.00049526, 0.00050243, 0.00036856,\n 0.00015665, 0.00015632, 0.00017035, 0.00015598, 0.00015284,\n 0.00015498, 0.00034259, 0.00029902, 0.00030494, 0.0003253 ,\n 0.00030077, 0.00033307, 0.00023961, 0.0003477 , 0.00030291,\n 0.00037673, 0.00030944, 0.00029302, 0.00020922, 0.00016001,\n 0.00021977, 0.00033518, 0.00030472, 0.00022102, 0.00024243]]) n_steps
(chain, draw)
float64
3.0 1.0 1.0 3.0 ... 3.0 3.0 1.0 1.0
array([[3., 1., 1., 3., 1., 3., 1., 1., 3., 3., 3., 3., 3., 3., 3., 3.,\n 1., 3., 1., 3., 1., 3., 1., 3., 1., 3., 3., 1., 3., 1., 1., 3.,\n 1., 1., 1., 1., 1., 3., 3., 1., 3., 3., 3., 1., 1., 3., 1., 3.,\n 3., 3., 3., 1., 1., 3., 1., 3., 1., 1., 1., 3., 3., 1., 3., 1.,\n 3., 3., 3., 3., 3., 1., 3., 1., 3., 1., 3., 1., 3., 1., 1., 3.,\n 3., 3., 1., 1., 3., 3., 3., 3., 3., 3., 3., 3., 1., 1., 1., 3.,\n 3., 3., 1., 3., 3., 3., 3., 1., 3., 3., 1., 3., 3., 1., 3., 3.,\n 1., 3., 3., 3., 3., 3., 1., 1., 1., 1., 3., 3., 3., 3., 3., 3.,\n 3., 1., 3., 3., 1., 1., 1., 3., 3., 3., 3., 3., 1., 3., 1., 1.,\n 1., 3., 3., 1., 1., 3., 3., 3., 3., 3., 1., 3., 3., 3., 3., 3.,\n 3., 1., 3., 3., 3., 3., 1., 1., 1., 1., 1., 3., 3., 1., 3., 1.,\n 1., 3., 3., 3., 3., 3., 3., 3., 3., 1., 1., 1., 1., 1., 3., 3.,\n 3., 3., 3., 3., 3., 1., 3., 1., 1., 1., 3., 1., 1., 1., 1., 1.,\n 1., 3., 3., 1., 3., 1., 1., 3., 3., 1., 1., 1., 3., 3., 3., 3.,\n 3., 3., 3., 3., 3., 1., 1., 1., 1., 1., 3., 3., 1., 3., 1., 1.,\n 3., 1., 1., 1., 1., 3., 3., 1., 3., 3., 1., 1., 1., 3., 1., 3.,\n 3., 3., 1., 3., 3., 1., 3., 3., 1., 3., 1., 1., 1., 3., 3., 3.,\n 3., 3., 3., 1., 1., 3., 1., 3., 3., 3., 3., 3., 3., 3., 1., 3.,\n 1., 1., 1., 3., 1., 3., 3., 1., 1., 1., 1., 1., 3., 3., 3., 3.,\n 1., 1., 3., 3., 3., 3., 3., 1., 1., 1., 1., 1., 3., 1., 3., 3.,\n...\n 1., 3., 1., 3., 1., 1., 3., 3., 3., 3., 3., 3., 3., 1., 1., 1.,\n 3., 1., 1., 3., 1., 3., 3., 3., 1., 3., 1., 3., 1., 1., 1., 3.,\n 3., 1., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 3.,\n 1., 1., 1., 3., 3., 1., 3., 3., 3., 3., 3., 1., 3., 1., 1., 3.,\n 1., 1., 1., 1., 3., 3., 1., 3., 3., 3., 3., 1., 3., 1., 3., 3.,\n 3., 3., 3., 3., 3., 1., 1., 1., 1., 1., 1., 3., 1., 1., 1., 1.,\n 1., 1., 1., 1., 3., 1., 3., 1., 3., 3., 3., 3., 3., 3., 3., 1.,\n 3., 1., 3., 3., 3., 1., 3., 3., 3., 1., 1., 1., 1., 3., 3., 3.,\n 1., 3., 3., 1., 1., 3., 3., 1., 1., 3., 1., 1., 3., 3., 3., 3.,\n 3., 1., 1., 3., 3., 1., 3., 3., 3., 3., 1., 3., 3., 1., 1., 3.,\n 3., 
3., 3., 3., 3., 3., 3., 3., 3., 3., 3., 1., 1., 3., 3., 1.,\n 3., 3., 3., 3., 1., 1., 3., 3., 3., 3., 1., 1., 1., 1., 3., 3.,\n 3., 1., 1., 3., 3., 3., 3., 3., 1., 1., 3., 1., 1., 1., 3., 1.,\n 1., 1., 1., 1., 1., 1., 3., 3., 3., 3., 1., 3., 3., 1., 3., 1.,\n 3., 3., 1., 1., 3., 1., 3., 3., 1., 1., 3., 3., 1., 3., 3., 3.,\n 3., 1., 3., 3., 1., 3., 1., 1., 1., 1., 1., 1., 3., 3., 3., 3.,\n 1., 1., 1., 3., 3., 1., 3., 1., 3., 3., 1., 3., 1., 1., 1., 1.,\n 3., 3., 1., 1., 1., 1., 1., 3., 3., 3., 3., 1., 1., 1., 1., 1.,\n 1., 3., 3., 3., 3., 3., 3., 1., 3., 3., 3., 3., 3., 1., 1., 1.,\n 3., 3., 1., 1.]]) process_time_diff
(chain, draw)
float64
0.0003081 0.0001803 ... 0.0002429
array([[0.00030807, 0.00018032, 0.00021692, 0.00029716, 0.00015165,\n 0.00030634, 0.00014976, 0.0001578 , 0.00047429, 0.00048149,\n 0.00043397, 0.00032032, 0.00029027, 0.00028182, 0.00028592,\n 0.00028575, 0.00014954, 0.00030087, 0.00014958, 0.00029464,\n 0.0001577 , 0.00028836, 0.00014932, 0.00030764, 0.00014661,\n 0.00032658, 0.00034585, 0.00015516, 0.00037789, 0.00015511,\n 0.00023936, 0.00036711, 0.00015858, 0.00015036, 0.00019512,\n 0.00015059, 0.00014695, 0.00027587, 0.00028481, 0.00014863,\n 0.00033459, 0.0003043 , 0.00027733, 0.00016557, 0.00018753,\n 0.00032195, 0.00017145, 0.0003528 , 0.00035471, 0.00029321,\n 0.00028752, 0.00020829, 0.0001891 , 0.00049629, 0.00025981,\n 0.00042593, 0.00015276, 0.00014715, 0.00014842, 0.00028267,\n 0.00027192, 0.00014628, 0.00029229, 0.00014652, 0.00028215,\n 0.00028515, 0.0002713 , 0.00030275, 0.00026751, 0.00014481,\n 0.00033703, 0.00014833, 0.00027846, 0.00016239, 0.00042226,\n 0.00026863, 0.00052102, 0.00025255, 0.0002419 , 0.0004815 ,\n 0.0004466 , 0.00054689, 0.00024864, 0.00024194, 0.00045069,\n 0.00049305, 0.00053346, 0.00052559, 0.00050581, 0.00051243,\n 0.00046297, 0.00035287, 0.00015154, 0.00014844, 0.00017026,\n 0.00036972, 0.00048808, 0.0004683 , 0.00020811, 0.00029429,\n...\n 0.00025299, 0.00027237, 0.00025435, 0.00025341, 0.00025661,\n 0.00026064, 0.00054189, 0.00056787, 0.00055718, 0.00050289,\n 0.00026938, 0.00053465, 0.00058391, 0.00026195, 0.00054503,\n 0.00029567, 0.00052677, 0.00051788, 0.00025873, 0.0002831 ,\n 0.00058941, 0.00018746, 0.00031504, 0.00030517, 0.00015539,\n 0.00015615, 0.00029432, 0.00029792, 0.0001511 , 0.00029683,\n 0.00028183, 0.00029289, 0.00028117, 0.00015654, 0.00032132,\n 0.00031232, 0.00016179, 0.00037534, 0.00015697, 0.00019529,\n 0.00015886, 0.00017829, 0.00026269, 0.00016777, 0.00030673,\n 0.00038541, 0.00030415, 0.00029856, 0.00015377, 0.00015294,\n 0.00015369, 0.00033266, 0.00029646, 0.00015343, 0.00030397,\n 0.00016853, 0.00029546, 0.00031899, 0.00022452, 0.00048402,\n 
0.00025944, 0.00027477, 0.00018293, 0.00015962, 0.00032841,\n 0.0003015 , 0.00015499, 0.00017809, 0.00015922, 0.00016468,\n 0.00015193, 0.00033158, 0.00050071, 0.00050263, 0.00036865,\n 0.00015655, 0.00015643, 0.00017033, 0.00015596, 0.00015313,\n 0.00015503, 0.00034296, 0.00029923, 0.00030501, 0.00032563,\n 0.00030093, 0.00033352, 0.00023992, 0.00034763, 0.00030294,\n 0.0003772 , 0.00030972, 0.00029301, 0.00018547, 0.0001602 ,\n 0.0002196 , 0.00033561, 0.00030491, 0.00022096, 0.00024291]]) lp
(chain, draw)
float64
-1.024e+03 ... -1.024e+03
array([[-1024.30775079, -1024.96105897, -1027.80698026, -1025.94050555,\n -1023.6682771 , -1023.44618818, -1023.40930109, -1023.52475071,\n -1024.60745017, -1023.4178119 , -1023.4178119 , -1024.0117698 ,\n -1023.39749539, -1023.43755595, -1024.13763124, -1024.65073675,\n -1023.60496911, -1023.82612712, -1024.34484282, -1023.96850353,\n -1023.52159417, -1024.43105604, -1023.64802617, -1023.70523322,\n -1023.49468238, -1023.57258626, -1023.42097772, -1023.42939427,\n -1025.14693729, -1023.67799639, -1023.76995536, -1023.57683655,\n -1023.50878025, -1023.40977911, -1023.52106895, -1023.79131641,\n -1023.51600718, -1023.39144907, -1023.63837314, -1023.55935147,\n -1023.39015826, -1023.40436596, -1026.90770415, -1023.82738375,\n -1023.71244495, -1023.68318065, -1023.49423888, -1023.39218501,\n -1024.9880592 , -1023.51691894, -1023.64575712, -1023.42656796,\n -1023.49861186, -1024.38938327, -1026.14942616, -1024.52895749,\n -1023.78549725, -1023.78549725, -1024.28898007, -1023.39384906,\n -1023.66630097, -1024.29033273, -1023.60627283, -1023.62357536,\n -1023.42454775, -1024.21881044, -1023.46017891, -1023.42976407,\n -1024.27484161, -1023.82991685, -1023.44725773, -1023.44725773,\n -1024.49140263, -1024.49140263, -1023.5555129 , -1023.41892585,\n -1024.79040038, -1026.16039573, -1024.49739595, -1025.38193048,\n...\n -1023.68491134, -1023.70232926, -1023.61302719, -1023.64427886,\n -1023.64427886, -1023.47993515, -1024.17232401, -1023.97024715,\n -1023.97024715, -1024.16966713, -1023.3906045 , -1023.42658483,\n -1023.42658483, -1023.47541835, -1024.37777266, -1024.63256913,\n -1023.59168561, -1023.61138706, -1023.88176008, -1023.99339387,\n -1023.57149172, -1023.75377445, -1023.82096967, -1024.22933275,\n -1024.70885838, -1023.79144439, -1024.52574071, -1023.72201658,\n -1025.49973711, -1023.82806623, -1023.39877854, -1023.44219667,\n -1023.41595458, -1023.39172599, -1023.70697304, -1023.47669674,\n -1023.44183873, -1024.00836063, -1023.53893129, -1023.52490078,\n 
-1023.5945647 , -1023.54459498, -1023.44812772, -1024.98610433,\n -1024.12222008, -1025.10193311, -1024.68675046, -1023.70766346,\n -1023.70766346, -1023.71156088, -1024.1076905 , -1024.33292642,\n -1023.40778249, -1023.3985514 , -1023.4098317 , -1024.19265317,\n -1024.19265317, -1023.49551094, -1025.57780211, -1026.13599595,\n -1023.81302769, -1023.53270353, -1023.5689554 , -1023.833317 ,\n -1023.97298039, -1023.4376185 , -1023.53956301, -1023.44952244,\n -1023.39003509, -1023.39270641, -1023.39555161, -1023.3942467 ,\n -1023.43543408, -1023.60037421, -1024.01454038, -1024.53872333,\n -1024.04389088, -1024.49440701, -1024.18048471, -1024.18048471]]) perf_counter_start
(chain, draw)
float64
2.542e+03 2.542e+03 ... 2.544e+03
array([[2541.5892196 , 2541.58963347, 2541.58992054, 2541.59024653,\n 2541.59064301, 2541.59088999, 2541.59129357, 2541.59156106,\n 2541.59185108, 2541.59250656, 2541.59316436, 2541.59374883,\n 2541.5941794 , 2541.59457065, 2541.59494933, 2541.59533527,\n 2541.59571613, 2541.59595948, 2541.59635748, 2541.59660297,\n 2541.59699977, 2541.59726331, 2541.59764818, 2541.59788964,\n 2541.59829313, 2541.59854829, 2541.59900745, 2541.59946407,\n 2541.59974055, 2541.60023048, 2541.60053451, 2541.60092337,\n 2541.60140396, 2541.60166394, 2541.6019092 , 2541.60220285,\n 2541.60244655, 2541.6026852 , 2541.60306999, 2541.60345011,\n 2541.60373707, 2541.60420879, 2541.60461152, 2541.60498815,\n 2541.60524968, 2541.60553622, 2541.60596326, 2541.60623217,\n 2541.60673987, 2541.60720435, 2541.60759738, 2541.60798206,\n 2541.60833446, 2541.60866514, 2541.60935349, 2541.6097662 ,\n 2541.61029945, 2541.6105474 , 2541.61080589, 2541.61107879,\n 2541.61145543, 2541.61181847, 2541.61207443, 2541.61246349,\n 2541.61270247, 2541.61308985, 2541.61347853, 2541.61384366,\n 2541.61424076, 2541.61460004, 2541.61483569, 2541.6152721 ,\n 2541.61553881, 2541.61591008, 2541.61616489, 2541.61675966,\n 2541.61719264, 2541.61791259, 2541.61831229, 2541.61869371,\n...\n 2543.99261333, 2543.99336934, 2543.99367464, 2543.99410636,\n 2543.9945168 , 2543.99477217, 2543.99503799, 2543.99543312,\n 2543.99582999, 2543.99608548, 2543.99647887, 2543.99685612,\n 2543.99725163, 2543.99763413, 2543.99788614, 2543.99830973,\n 2543.99874364, 2543.99901351, 2543.99950071, 2543.99976132,\n 2544.00007236, 2544.00033644, 2544.0006777 , 2544.00108466,\n 2544.00135916, 2544.00177079, 2544.00226709, 2544.00267605,\n 2544.00308728, 2544.00333556, 2544.00358375, 2544.00383389,\n 2544.00426775, 2544.0046616 , 2544.00491069, 2544.00532825,\n 2544.00559346, 2544.00599015, 2544.00644499, 2544.00677788,\n 2544.00742608, 2544.0078449 , 2544.00828292, 2544.00857446,\n 2544.00883161, 2544.00927415, 2544.00967577, 2544.00992809,\n 
2544.010204 , 2544.01045887, 2544.01075683, 2544.01101309,\n 2544.01150298, 2544.01219226, 2544.0128355 , 2544.01331604,\n 2544.0135912 , 2544.01384526, 2544.01411977, 2544.01437295,\n 2544.01462213, 2544.01487491, 2544.01532341, 2544.0157209 ,\n 2544.01612458, 2544.01655265, 2544.01695594, 2544.01741238,\n 2544.01776382, 2544.01822487, 2544.01864718, 2544.01914483,\n 2544.01955444, 2544.01994343, 2544.02026516, 2544.02067119,\n 2544.02102184, 2544.02147025, 2544.0218808 , 2544.02224681]]) index_in_trajectory
(chain, draw)
int64
1 -1 1 -2 1 -2 -1 ... 1 1 2 -2 1 0
array([[ 1, -1, 1, -2, 1, -2, -1, 1, 3, -2, 0, 2, -3, 3, 1, -1,\n -1, 2, 1, -2, 1, 2, -1, -1, 1, 2, -3, 1, 1, 1, -1, -2,\n 1, 1, -1, 1, 1, 1, 2, -1, 2, 2, -1, 1, 1, -2, 1, -1,\n 1, -3, 2, -1, -1, -1, 1, -1, 1, 0, -1, -1, -1, 1, 2, -1,\n 2, 2, -1, 3, 1, -1, 1, 0, 1, 0, -1, 1, 1, 1, -1, -2,\n -1, 2, -1, -1, 1, -3, -2, -3, -2, 1, 2, 1, 0, 0, 1, -1,\n 1, -1, -1, 3, -2, 3, -1, 1, 2, -1, -1, 1, -1, 1, 3, -3,\n 1, 0, -2, 1, 1, 0, 0, 1, -1, 1, 1, -1, -1, -3, -2, -1,\n 3, 1, -1, 1, 1, -1, 1, 0, -3, -3, -3, -2, 1, 2, 1, 1,\n 1, 0, -2, 1, 1, -2, 0, 3, -1, -3, 1, -3, -2, 3, 0, -2,\n -3, 0, -1, -3, 2, -2, -1, 1, -1, 1, -1, -1, -2, 1, -1, -1,\n -1, 3, 1, 2, 2, 1, 3, 1, -1, 1, 1, 1, 1, 1, 3, 0,\n 0, 2, -1, 2, -2, 1, 1, 1, -1, 0, -2, 1, 1, -1, -1, 1,\n 1, 1, -2, 1, -1, 0, -1, 3, 0, 1, 0, -1, -1, -2, -3, 2,\n -3, 0, -1, 0, 2, 1, 1, 1, -1, 0, -3, 1, -1, -2, -1, 1,\n 1, 0, -1, 0, 1, 1, -1, -1, 3, -1, -1, 1, 1, -2, 1, -1,\n 2, -2, -1, 3, 1, 1, -2, 1, -1, 1, -1, -1, 1, -1, 0, 1,\n -2, -1, 1, -1, 1, -3, 1, -1, -3, 2, 1, 1, 0, 1, -1, -2,\n -1, -1, -1, -2, 0, 2, -3, 1, -1, -1, -1, -1, 3, -3, -1, -3,\n -1, 1, 2, -2, -2, -3, 3, -1, 1, -1, -1, -1, 2, -1, -1, 2,\n...\n -1, -3, -1, -2, 1, -1, 0, 3, 3, -2, 0, -3, -2, -1, 1, 1,\n 2, 0, 1, -1, -1, 1, 1, -1, 1, -3, 1, -1, -1, -1, -1, 3,\n -1, 1, -1, -1, 2, 2, -1, -1, -3, -2, 0, -3, -3, 2, 2, 1,\n -1, 0, -1, -1, -1, 1, 3, -1, 2, 1, -1, 0, 2, 1, 1, 2,\n 1, 0, 1, 1, 1, -2, 1, 1, 1, -1, -2, -1, 2, 1, -2, -3,\n -1, 3, 1, 1, 0, 1, 1, 1, 1, 1, 1, 3, -1, 1, -1, -1,\n 1, 1, 0, 1, -2, -1, -3, 1, 3, 2, -2, 2, 2, -2, -1, -1,\n -3, 1, -3, -2, -2, 1, -3, 2, -1, -1, 1, 1, 1, -1, 1, 3,\n -1, 2, 2, -1, 0, 1, -3, 1, 1, -1, -1, 0, -3, -2, -3, -1,\n -2, 1, -1, 3, -2, 1, 2, 3, 0, -1, 0, 2, 2, -1, 0, -2,\n 1, 2, 2, 2, -3, 2, 3, -3, -1, -2, -2, -1, -1, 2, 2, -1,\n 0, 3, 1, -2, 1, 1, -2, -2, 3, 0, -1, 0, 1, -1, 1, 1,\n 2, 0, -1, -3, 2, 1, 2, -2, 1, -1, 2, -1, -1, -1, 3, -1,\n 1, -1, 0, -1, 1, 0, 3, -3, 3, 1, -1, 2, -3, 1, 2, 1,\n -3, -3, 1, 1, 1, 1, 2, -1, 0, -1, 3, -2, 
0, -1, -1, 1,\n 0, 1, 1, 2, 1, -1, -1, 1, 1, 1, -1, -1, -2, -1, -2, -1,\n -1, -1, -1, 2, 2, 1, 1, 1, -2, -2, 1, 3, -1, 1, 1, 1,\n 2, -1, 1, 1, 0, -1, -1, 2, 1, -2, -2, -1, 0, -1, 1, 1,\n -1, 2, -2, -2, 2, 2, -2, -1, 3, -2, -2, 2, -2, 1, 1, 1,\n 2, -2, 1, 0]]) reached_max_treedepth
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) step_size
(chain, draw)
float64
1.181 1.181 1.181 ... 1.252 1.252
array([[1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n 1.18087031, 1.18087031, 1.18087031, 1.18087031, 1.18087031,\n...\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 
1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785,\n 1.25213785, 1.25213785, 1.25213785, 1.25213785, 1.25213785]]) diverging
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) energy_error
(chain, draw)
float64
0.03457 0.2151 ... -0.1392 0.0
array([[ 3.45724391e-02, 2.15115734e-01, 9.21767451e-01,\n -6.20041927e-01, -7.88613971e-01, -7.49273915e-02,\n -1.24698054e-02, 3.89861344e-02, 3.57054412e-01,\n -3.88698515e-01, 0.00000000e+00, 2.05007673e-01,\n -2.15717051e-01, 1.37731551e-02, 2.36253468e-01,\n 1.78461503e-01, -3.61433947e-01, 7.49669514e-02,\n 1.72320250e-01, -1.26444286e-01, -1.53774021e-01,\n 3.13583090e-01, -2.60818287e-01, 1.91508900e-02,\n -7.07074133e-02, 2.65373020e-02, -5.01731258e-02,\n 2.87259314e-03, 5.85400070e-01, -4.87653213e-01,\n 3.07487880e-02, -6.54763350e-02, -2.33429546e-02,\n -3.38133455e-02, 3.80151775e-02, 9.28932313e-02,\n -9.46217572e-02, -4.24336796e-02, 8.25326841e-02,\n -2.71483417e-02, -5.67479287e-02, 4.80688118e-03,\n 1.20521023e+00, -1.07361649e+00, -3.96393328e-02,\n -9.60535524e-03, -6.48519506e-02, -3.47706571e-02,\n 5.40090269e-01, -5.15323334e-01, 4.36939279e-02,\n -7.38514325e-02, 2.43094941e-02, 2.98021948e-01,\n 5.76207766e-01, -5.29752100e-01, -2.46891136e-01,\n 0.00000000e+00, 1.67414337e-01, -3.02028526e-01,\n...\n 7.99663307e-02, 2.95633920e-02, 1.80281207e-01,\n 2.13647152e-01, -3.87955918e-01, 2.68082543e-01,\n -3.54917481e-01, 7.87538199e-01, -7.41564401e-01,\n -1.87299072e-01, 1.94947940e-02, -1.24710069e-02,\n -1.04707610e-02, 1.35809613e-01, -9.86451923e-02,\n -1.52801942e-02, 2.76235309e-01, -2.00268060e-01,\n -6.05962540e-03, 2.98446307e-02, -2.13991386e-02,\n -4.14401410e-02, 6.55300790e-01, -4.04210135e-01,\n 4.35274327e-01, -1.85097193e-01, -4.32548302e-01,\n 0.00000000e+00, 1.71212777e-03, 1.74549531e-01,\n 9.62117594e-02, -3.96276194e-01, -4.03764055e-03,\n 4.94560033e-03, 3.35454665e-01, 0.00000000e+00,\n -2.97449623e-01, 8.83626031e-01, 2.30868291e-01,\n -9.76422603e-01, -1.29275615e-01, 1.55041783e-02,\n 1.37906782e-01, 6.09516560e-02, -1.78608131e-01,\n 4.18530888e-02, -3.93081661e-02, -2.57876919e-02,\n 1.14125426e-03, 1.23903401e-03, -5.87500682e-04,\n 1.69877724e-02, 7.20205893e-02, 1.82113154e-01,\n 2.32172623e-01, 
-1.99919388e-01, 1.83955533e-01,\n -1.39184318e-01, 0.00000000e+00]]) largest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) tree_depth
(chain, draw)
int64
2 1 1 2 1 2 1 1 ... 2 1 1 1 2 2 1 1
array([[2, 1, 1, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 2,\n 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 2, 2, 1,\n 1, 2, 1, 2, 2, 2, 2, 1, 1, 2, 1, 2, 1, 1, 1, 2, 2, 1, 2, 1, 2, 2,\n 2, 2, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 2,\n 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1,\n 2, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2,\n 1, 1, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 1, 1, 2, 2, 2, 2, 2,\n 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 1,\n 1, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 1,\n 2, 1, 1, 1, 2, 1, 1, 1, 1, 1, 1, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 1,\n 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 1, 2, 1, 1, 2, 1,\n 1, 1, 1, 2, 2, 1, 2, 2, 1, 1, 1, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2,\n 1, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 1, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2,\n 1, 2, 1, 1, 1, 2, 1, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2,\n 2, 2, 2, 1, 1, 1, 1, 1, 2, 1, 2, 2, 1, 2, 1, 1, 2, 2, 2, 2, 2, 2,\n 1, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 2, 2, 1, 2, 2,\n 1, 1, 2, 2, 2, 1, 1, 2, 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 1, 2, 1, 2,\n 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1,\n 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1,\n 2, 2, 2, 2, 1, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 1, 1, 1, 2, 1, 2,\n...\n 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2, 1, 2, 1, 1, 2, 1,\n 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1,\n 2, 2, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 2,\n 2, 1, 1, 2, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 1, 1, 2,\n 2, 1, 1, 2, 2, 2, 2, 2, 1, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 2, 2, 1,\n 2, 1, 2, 2, 1, 2, 2, 2, 1, 2, 1, 1, 2, 2, 1, 1, 1, 2, 1, 2, 1, 1,\n 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 1, 1, 2, 1, 2, 2, 2, 1, 2, 1, 2,\n 1, 1, 1, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1,\n 1, 2, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 1, 1, 1, 1, 2, 2, 1, 2,\n 2, 2, 2, 1, 2, 
1, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2, 1, 1,\n 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 1, 2, 1, 2, 2,\n 2, 1, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 1, 2, 2, 1, 1, 2, 2, 1, 1, 2,\n 1, 1, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 1, 2,\n 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 1, 1,\n 2, 2, 2, 2, 1, 1, 1, 1, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 1, 1, 2, 1,\n 1, 1, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 2, 2,\n 1, 1, 2, 1, 2, 2, 1, 1, 2, 2, 1, 2, 2, 2, 2, 1, 2, 2, 1, 2, 1, 1,\n 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 2, 2, 1, 2, 1, 2, 2, 1, 2, 1, 1,\n 1, 1, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 2, 2, 2,\n 2, 2, 2, 1, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 1, 1]]) max_energy_error
(chain, draw)
float64
-0.255 0.2151 ... -0.1392 0.7908
array([[-2.55043496e-01, 2.15115734e-01, 9.21767451e-01,\n -1.32336237e+00, -7.88613971e-01, -8.90444617e-02,\n -1.24698054e-02, 3.89861344e-02, 7.20123198e-01,\n 1.12149930e+00, 7.72170049e-01, 2.94358649e-01,\n 4.36820199e-01, 6.86140804e-02, 3.18229733e-01,\n -2.39722905e-01, -3.61433947e-01, 7.49669514e-02,\n 1.72320250e-01, -2.89218485e-01, -1.53774021e-01,\n 3.13583090e-01, -2.60818287e-01, -8.15102082e-02,\n -7.07074133e-02, -3.16967564e-02, 2.47492567e-01,\n 2.87259314e-03, 7.29026575e-01, -4.87653213e-01,\n 3.07487880e-02, -1.23766480e-01, -2.33429546e-02,\n -3.38133455e-02, 3.80151775e-02, 9.28932313e-02,\n -9.46217572e-02, -4.24336796e-02, 2.50835257e-01,\n -2.71483417e-02, 9.89128134e-02, 1.48661961e-02,\n 1.20521023e+00, -1.07361649e+00, -3.96393328e-02,\n 1.04318264e+00, -6.48519506e-02, -3.47706571e-02,\n 5.85463711e-01, -5.15323334e-01, 4.36939279e-02,\n -7.38514325e-02, 2.43094941e-02, 2.98021948e-01,\n 5.76207766e-01, -9.27389546e-01, -2.46891136e-01,\n 1.88004105e-01, 1.67414337e-01, -3.02028526e-01,\n...\n 7.99663307e-02, 2.95633920e-02, 1.80281207e-01,\n -2.55613574e-01, -5.35134805e-01, 1.33418858e+00,\n -4.47045764e-01, 7.87538199e-01, -7.41564401e-01,\n -1.87299072e-01, 9.73852516e-02, 6.96475036e-01,\n -1.04707610e-02, 1.35809613e-01, -9.86451923e-02,\n -2.58339915e-02, 1.35764276e+00, -2.00268060e-01,\n 4.91776944e-02, 2.98446307e-02, -2.13991386e-02,\n -4.14401410e-02, 6.55300790e-01, -4.04210135e-01,\n 4.35274327e-01, -1.85097193e-01, -4.32548302e-01,\n 2.16022022e+00, 1.71212777e-03, 1.74549531e-01,\n -2.96765857e-01, -3.96276194e-01, -4.03764055e-03,\n 1.00846441e-02, 3.35454665e-01, 8.58470643e-02,\n -2.97449623e-01, 8.83626031e-01, 2.30868291e-01,\n -9.76422603e-01, 1.99632149e-01, -6.06356003e-02,\n 3.91956335e+00, -1.79162657e-01, 2.59737433e+00,\n 1.70186294e-01, -3.93081661e-02, -2.57876919e-02,\n 1.23250867e-02, 1.82940822e-03, 1.05679746e-01,\n 1.44801166e-01, 7.20205893e-02, 1.82113154e-01,\n 2.32172623e-01, -2.19746777e-01, 
1.83955533e-01,\n -1.39184318e-01, 7.90798604e-01]]) energy
(chain, draw)
float64
1.025e+03 1.025e+03 ... 1.025e+03
array([[1024.56260486, 1025.17815314, 1028.01889134, 1027.19325044,\n 1025.1529763 , 1023.65521273, 1023.43659282, 1023.52504436,\n 1025.33902406, 1026.44649652, 1024.95506127, 1024.36077746,\n 1024.85357349, 1023.58449214, 1024.31309841, 1024.84496969,\n 1024.29902517, 1023.83238135, 1024.42930077, 1024.21873142,\n 1023.82605456, 1024.7575754 , 1024.19168979, 1023.78333995,\n 1023.64909121, 1023.57305884, 1024.17308263, 1023.43873894,\n 1025.45090273, 1024.66574494, 1023.85318361, 1023.71005107,\n 1023.58516335, 1023.47568863, 1023.52156121, 1023.81071639,\n 1023.71444925, 1023.47762286, 1023.99644032, 1023.65852211,\n 1023.720151 , 1023.42588324, 1027.05588959, 1025.8401709 ,\n 1023.88247589, 1025.65921072, 1023.63527672, 1023.46173388,\n 1025.1505155 , 1025.38513227, 1023.6492881 , 1023.57242597,\n 1023.50388334, 1024.38938443, 1026.29249487, 1025.81040928,\n 1024.33818954, 1024.23199833, 1024.36292753, 1024.0334836 ,\n 1023.70458403, 1024.32894102, 1024.22108575, 1023.69366841,\n 1023.62270001, 1024.68969232, 1023.96533611, 1023.80465058,\n 1024.45345891, 1024.22069045, 1023.84895409, 1023.6200142 ,\n 1024.5001212 , 1025.70737777, 1024.18159018, 1023.51050262,\n 1025.0020207 , 1026.49079489, 1025.79260609, 1025.38894289,\n...\n 1023.78260534, 1023.83021214, 1023.70072624, 1023.73789964,\n 1025.59864376, 1023.60431972, 1024.43966987, 1024.15003408,\n 1026.1086685 , 1024.41360665, 1023.84071925, 1023.42658532,\n 1024.89682788, 1023.48864562, 1024.39032358, 1024.68320404,\n 1024.23026691, 1023.69823976, 1023.96575365, 1024.20272396,\n 1023.87020492, 1023.82400287, 1023.97631362, 1024.39870189,\n 1024.96950115, 1024.44584594, 1026.25634824, 1024.28517823,\n 1025.59389336, 1024.89624801, 1023.64082876, 1023.56077044,\n 1024.3684809 , 1023.40571463, 1023.70777585, 1023.63518482,\n 1023.48596983, 1025.66040791, 1023.8513679 , 1023.70944416,\n 1023.64823994, 1023.63637695, 1023.52332828, 1024.98768279,\n 1025.41498482, 1025.38211549, 1025.46830337, 1024.35824728,\n 
1026.45300881, 1023.85024266, 1024.23014983, 1024.39567824,\n 1023.93668782, 1023.4139746 , 1023.42447942, 1024.19272607,\n 1024.63749557, 1023.91727785, 1025.58352783, 1027.01246543,\n 1025.24979006, 1024.10811441, 1023.57635987, 1029.19416547,\n 1024.01324533, 1027.13627457, 1023.75649938, 1023.52193231,\n 1023.42899242, 1023.40779804, 1023.398653 , 1023.53470209,\n 1023.6112053 , 1023.61408566, 1024.08903563, 1024.78808172,\n 1024.81968362, 1024.8854995 , 1024.68660585, 1025.44691205]]) Attributes: (8)
created_at : 2026-01-08T04:57:00.923531+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 11.285976886749268 tuning_steps : 500 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n observed_data \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 12kB\nDimensions: (__obs__: 500, rt,response_extra_dim_0: 2)\nCoordinates:\n * __obs__ (__obs__) int64 4kB 0 1 2 3 4 ... 496 497 498 499\n * rt,response_extra_dim_0 (rt,response_extra_dim_0) int64 16B 0 1\nData variables:\n rt,response (__obs__, rt,response_extra_dim_0) float64 8kB 5...\nAttributes:\n created_at: 2026-01-08T04:57:00.929988+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions: __obs__ : 500rt,response_extra_dim_0 : 2
Coordinates: (2)
Data variables: (1)
rt,response
(__obs__, rt,response_extra_dim_0)
float64
5.251 -1.0 0.817 ... -1.0 2.098 1.0
array([[ 5.25085068, -1. ],\n [ 0.81702983, 1. ],\n [ 1.22963595, 1. ],\n [ 2.08696556, 1. ],\n [ 2.89212894, 1. ],\n [ 3.54065871, 1. ],\n [ 1.72898936, 1. ],\n [ 2.21090579, 1. ],\n [ 4.0863018 , -1. ],\n [ 8.18515587, -1. ],\n [ 1.17861474, 1. ],\n [ 1.71078253, 1. ],\n [ 1.11688149, 1. ],\n [ 1.92637146, 1. ],\n [ 4.49467134, 1. ],\n [ 1.7099334 , 1. ],\n [ 2.4099648 , 1. ],\n [ 1.55375707, 1. ],\n [ 3.20066214, -1. ],\n [ 1.17386627, 1. ],\n...\n [ 3.52440763, 1. ],\n [ 1.30275202, 1. ],\n [ 5.3520937 , 1. ],\n [ 0.9726209 , 1. ],\n [ 1.31584668, 1. ],\n [ 0.95979559, 1. ],\n [ 1.75311542, 1. ],\n [ 5.5283246 , -1. ],\n [ 3.77018714, -1. ],\n [ 9.15060711, 1. ],\n [ 3.17011929, 1. ],\n [ 1.92992651, 1. ],\n [ 1.81051934, 1. ],\n [ 1.4278065 , 1. ],\n [ 1.83704746, 1. ],\n [ 1.17173886, 1. ],\n [ 1.67033231, 1. ],\n [ 3.16141605, 1. ],\n [ 3.40105796, -1. ],\n [ 2.09810948, 1. ]]) Attributes: (6)
created_at : 2026-01-08T04:57:00.929988+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n
\n "
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nMultiprocess sampling (2 chains in 2 jobs)\nNUTS: [v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 1.181 3 2287.01 draws/s 0:00:00 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 1.252 1 374.87 draws/s 0:00:02 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 500 tune and 500 draw iterations (1_000 + 1_000 draws total) took 11 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "wlyU",
+ "code_hash": "a757f284768cce9719b56f4ad9887a04",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "GrQN",
+ "code_hash": "361dd8284e340f581d8090a4ec260898",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "mySd",
+ "code_hash": "5883f7bd0584604e42f234d13268f244",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "vGkK",
+ "code_hash": "1de6a377ea06ad43551d6b2e83c6594a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: ddm\n\nResponse variable: rt,response\nLikelihood: analytical\nObservations: 500\n\nParameters:\n\nv:\n Prior: Normal(mu: 0.0, sigma: 0.01)\n Explicit bounds: (-inf, inf)\n\na:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\nz:\n Prior: Uniform(lower: 0.0, upper: 1.0)\n Explicit bounds: (0.0, 1.0)\n\nt:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zmIa",
+ "code_hash": "d1004fec36de15b95ac649b1ae66e5b7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nMultiprocess sampling (2 chains in 2 jobs)\nNUTS: [a, t, z, v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 0.561 3 66.02 draws/s 0:00:15 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 0.514 3 58.35 draws/s 0:00:17 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 500 tune and 500 draw iterations (1_000 + 1_000 draws total) took 26 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "NOfw",
+ "code_hash": "4dceafa3a25dcc4cdf6db0c4c401d598",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ynmH",
+ "code_hash": "b2d6cef98f71b3bc1d14cc4046ce06e8",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ChmK",
+ "code_hash": "b1830d5f93bb0500d7a963d221529223",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "KdvC",
+ "code_hash": "382be22448d8de9a9239ddb14d59a0bf",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: ddm\n\nResponse variable: rt,response\nLikelihood: analytical\nObservations: 1000\n\nParameters:\n\nv:\n Formula: v ~ 1 + x + y\n Priors:\n v_Intercept ~ Normal(mu: 2.0, sigma: 3.0)\n v_x ~ Normal(mu: 0.0, sigma: 0.25)\n v_y ~ Normal(mu: 0.0, sigma: 0.25)\n Link: identity\n Explicit bounds: (-inf, inf)\n\na:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\nz:\n Prior: Uniform(lower: 0.0, upper: 1.0)\n Explicit bounds: (0.0, 1.0)\n\nt:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "EKSN",
+ "code_hash": "249969ad166dd51acd14410516f31067",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "tasN",
+ "code_hash": "591440de6b7f4814526437caec7de25a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: ddm\n\nResponse variable: rt,response\nLikelihood: analytical\nObservations: 1000\n\nParameters:\n\nv:\n Formula: v ~ 1 + x + y\n Priors:\n v_Intercept ~ Normal(mu: 2.0, sigma: 3.0)\n v_x ~ Normal(mu: 0.0, sigma: 0.25)\n v_y ~ Normal(mu: 0.0, sigma: 0.25)\n Link: identity\n Explicit bounds: (-inf, inf)\n\na:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\nz:\n Prior: Uniform(lower: 0.0, upper: 1.0)\n Explicit bounds: (0.0, 1.0)\n\nt:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "SXvM",
+ "code_hash": "b30808ef95e931bdfc0bb7d19e499064",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "CFll",
+ "code_hash": "80d5498b9cf7c86809a7e711650b5e77",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Hierarchical Sequential Sampling Model\nModel: ddm\n\nResponse variable: rt,response\nLikelihood: analytical\nObservations: 1000\n\nParameters:\n\nv:\n Formula: v ~ 1 + x + y\n Priors:\n v_Intercept ~ Normal(mu: 2.0, sigma: 3.0)\n v_x ~ Normal(mu: 0.0, sigma: 0.25)\n v_y ~ Normal(mu: 0.0, sigma: 0.25)\n Link: identity\n Explicit bounds: (-inf, inf)\n\na:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\nz:\n Prior: Uniform(lower: 0.0, upper: 1.0)\n Explicit bounds: (0.0, 1.0)\n\nt:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0)\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "GbuD",
+ "code_hash": "f5edd1927e52a5cb97933b8fb88707fe",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "bHhy",
+ "code_hash": "e5c0889cec505a983b62d1ff9c682254",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: ddm\n\nResponse variable: rt,response\nLikelihood: analytical\nObservations: 1000\n\nParameters:\n\nv:\n Formula: v ~ 1 + x + y\n Priors:\n v_Intercept ~ Uniform(lower: -3.0, upper: 3.0)\n v_x ~ Uniform(lower: -1.0, upper: 1.0)\n v_y ~ Uniform(lower: -1.0, upper: 1.0)\n Link: identity\n Explicit bounds: (-inf, inf)\n\na:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\nz:\n Prior: Uniform(lower: 0.0, upper: 1.0)\n Explicit bounds: (0.0, 1.0)\n\nt:\n Prior: HalfNormal(sigma: 2.0)\n Explicit bounds: (0.0, inf)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "NtNR",
+ "code_hash": "d6d4b1c4f5a9b3b94f3adc9aab7d6a93",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nMultiprocess sampling (2 chains in 2 jobs)\nNUTS: [a, t, z, v_Intercept, v_x, v_y]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 0.530 7 25.63 draws/s 0:00:38 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 0.474 7 24.22 draws/s 0:00:41 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 500 tune and 500 draw iterations (1_000 + 1_000 draws total) took 50 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/1000 [00:00, ?it/s]\r 7%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 70/1000 [00:00<00:01, 697.89it/s]\r 15%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 146/1000 [00:00<00:01, 732.09it/s]\r 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 226/1000 [00:00<00:01, 759.99it/s]\r 31%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 309/1000 [00:00<00:00, 786.53it/s]\r 40%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 395/1000 [00:00<00:00, 810.42it/s]\r 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 481/1000 [00:00<00:00, 824.69it/s]\r 
57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 566/1000 [00:00<00:00, 830.23it/s]\r 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 650/1000 [00:00<00:00, 748.55it/s]\r 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 727/1000 [00:00<00:00, 697.49it/s]\r 
80%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 799/1000 [00:01<00:00, 648.11it/s]\r 87%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 866/1000 [00:01<00:00, 629.94it/s]\r 
93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 934/1000 [00:01<00:00, 642.92it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1000/1000 [00:01<00:00, 711.05it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "nebC",
+ "code_hash": "80a7510380e2920fac9549abe9378a93",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n \n \n
\n \n \n \n posterior \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 52kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 ... 493 494 495 496 497 498 499\nData variables:\n v_Intercept (chain, draw) float64 8kB 0.3194 0.3201 0.35 ... 0.3987 0.2783\n v_x (chain, draw) float64 8kB 0.824 0.8574 0.7698 ... 0.8455 0.8247\n z (chain, draw) float64 8kB 0.4879 0.4997 0.481 ... 0.4699 0.5145\n v_y (chain, draw) float64 8kB 0.2902 0.3163 ... 0.2837 0.2524\n a (chain, draw) float64 8kB 1.514 1.503 1.529 ... 1.477 1.471\n t (chain, draw) float64 8kB 0.1076 0.08356 ... 0.1176 0.1185\nAttributes:\n created_at: 2026-01-08T04:58:29.481075+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 49.984930753707886\n tuning_steps: 500\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (6)
v_Intercept
(chain, draw)
float64
0.3194 0.3201 ... 0.3987 0.2783
array([[0.31938351, 0.32012164, 0.34996372, 0.25245459, 0.37396485,\n 0.27491537, 0.31142007, 0.32241861, 0.31850388, 0.28479844,\n 0.32031009, 0.35093125, 0.43481719, 0.22496466, 0.24886393,\n 0.38870308, 0.37895096, 0.32022898, 0.29783098, 0.31062358,\n 0.29816064, 0.29860177, 0.32584201, 0.32976566, 0.33584598,\n 0.33298332, 0.30778557, 0.40058023, 0.31721184, 0.29886862,\n 0.30591569, 0.35631218, 0.37038322, 0.3702663 , 0.34454642,\n 0.35487019, 0.35454749, 0.33463646, 0.33649154, 0.33047292,\n 0.29123706, 0.31430024, 0.29227383, 0.35387469, 0.29846193,\n 0.31499423, 0.2738108 , 0.33623978, 0.32936435, 0.28506557,\n 0.34852772, 0.33340627, 0.34476586, 0.27641189, 0.2913569 ,\n 0.30884555, 0.35395198, 0.29600727, 0.29557909, 0.29162452,\n 0.31759585, 0.32559839, 0.3229766 , 0.29249737, 0.3497408 ,\n 0.3497408 , 0.29701119, 0.33542451, 0.3667161 , 0.25646672,\n 0.3397337 , 0.3197957 , 0.27201914, 0.32949398, 0.29906639,\n 0.34274741, 0.34986815, 0.35520252, 0.32011405, 0.36594182,\n 0.33101119, 0.32877291, 0.25912301, 0.2478462 , 0.26942458,\n 0.34077006, 0.31798394, 0.32320966, 0.33487488, 0.32711781,\n 0.36289343, 0.31705419, 0.34871284, 0.2804347 , 0.38947447,\n 0.27189159, 0.27307095, 0.3186909 , 0.30854022, 0.29578988,\n...\n 0.34625806, 0.29489328, 0.34468211, 0.34551834, 0.23935419,\n 0.26514169, 0.28025949, 0.35004328, 0.29058023, 0.29203267,\n 0.34599638, 0.33580017, 0.31974638, 0.28484073, 0.2988122 ,\n 0.28493504, 0.32060063, 0.32060063, 0.35173727, 0.35173727,\n 0.32037305, 0.29799165, 0.30771619, 0.3586128 , 0.36013229,\n 0.28874102, 0.23980045, 0.34640269, 0.36762165, 0.28970446,\n 0.31491346, 0.34965261, 0.30561172, 0.32474351, 0.31110917,\n 0.32384985, 0.2904811 , 0.34095079, 0.34095079, 0.31485101,\n 0.38132514, 0.32489155, 0.30858242, 0.30720897, 0.27580073,\n 0.31721488, 0.35208387, 0.36486491, 0.31197273, 0.33068136,\n 0.32514847, 0.31977971, 0.35046923, 0.36023098, 0.32277802,\n 0.31549319, 0.31140812, 0.38195499, 0.37276456, 0.33471591,\n 
0.33081849, 0.30769851, 0.35551393, 0.28576776, 0.29585197,\n 0.27925685, 0.28938454, 0.29943291, 0.3047938 , 0.33065354,\n 0.40499531, 0.34709934, 0.31859324, 0.31686934, 0.31208422,\n 0.32286717, 0.33943611, 0.34707295, 0.38282416, 0.26745377,\n 0.29287146, 0.27454074, 0.296987 , 0.36951932, 0.33891683,\n 0.30451362, 0.29623416, 0.33414641, 0.362058 , 0.40719629,\n 0.37203327, 0.38742165, 0.34120491, 0.33838723, 0.34123743,\n 0.31854203, 0.31334916, 0.30420243, 0.39870727, 0.27833092]]) v_x
(chain, draw)
float64
0.824 0.8574 ... 0.8455 0.8247
array([[0.82400313, 0.85740513, 0.76975875, 0.86153939, 0.78355432,\n 0.82428925, 0.77932323, 0.81988634, 0.80711439, 0.83031231,\n 0.78737928, 0.8986154 , 0.84210175, 0.7958843 , 0.82041358,\n 0.8292686 , 0.82382431, 0.82285073, 0.81337146, 0.84268655,\n 0.89860289, 0.88311196, 0.91064345, 0.90138381, 0.90669464,\n 0.8967999 , 0.91002254, 0.9169652 , 0.89794 , 0.93042356,\n 0.93446177, 0.70835644, 0.91233285, 0.85144598, 0.83456266,\n 0.8290329 , 0.81444234, 0.88087074, 0.77920687, 0.86576433,\n 0.81985114, 0.79186841, 0.83991579, 0.85903172, 0.76863267,\n 0.85246533, 0.81940821, 0.81930342, 0.87029796, 0.74204628,\n 0.85306934, 0.84275389, 0.86740391, 0.77410513, 0.79759315,\n 0.7515212 , 0.85184166, 0.72679256, 0.84352621, 0.82359078,\n 0.79648528, 0.87351594, 0.83748313, 0.90339832, 0.87206186,\n 0.87206186, 0.76229114, 0.88372973, 0.83747214, 0.81784272,\n 0.82813433, 0.82170434, 0.79001514, 0.75260208, 0.8156169 ,\n 0.73617422, 0.9050173 , 0.88294549, 0.90273773, 0.90916323,\n 0.80434117, 0.73294975, 0.8636117 , 0.86067914, 0.82959535,\n 0.84638007, 0.816017 , 0.84470792, 0.87985083, 0.87128435,\n 0.86555798, 0.83473031, 0.79162873, 0.88017827, 0.82321408,\n 0.75590967, 0.84430173, 0.8733812 , 0.89363356, 0.8216821 ,\n...\n 0.76962929, 0.84499643, 0.74717143, 0.80086246, 0.84870765,\n 0.74368742, 0.75147943, 0.87099038, 0.74879205, 0.70610118,\n 0.90436445, 0.73001066, 0.77902638, 0.8458565 , 0.82268434,\n 0.79680571, 0.85664265, 0.85664265, 0.75759715, 0.75759715,\n 0.90144033, 0.86380235, 0.82156605, 0.8637015 , 0.92234067,\n 0.74598687, 0.78055547, 0.88265262, 0.78107804, 0.81815287,\n 0.90629552, 0.78429789, 0.83360884, 0.75983678, 0.8448102 ,\n 0.77766383, 0.856166 , 0.76413481, 0.76413481, 0.92627377,\n 0.88972706, 0.90620388, 0.79637766, 0.7498275 , 0.87075842,\n 0.81200059, 0.76710933, 0.88927761, 0.72118468, 0.89378764,\n 0.7695108 , 0.81382184, 0.87591723, 0.86449875, 0.80734182,\n 0.79012357, 0.79812443, 0.81078277, 0.81564491, 0.85298289,\n 
0.83405387, 0.78617852, 0.77699004, 0.83846896, 0.78790265,\n 0.73306843, 0.8583542 , 0.84151061, 0.81540006, 0.86829528,\n 0.84432797, 0.84524329, 0.77426202, 0.81484937, 0.76454091,\n 0.91868472, 0.8251975 , 0.8601104 , 0.83838518, 0.83053675,\n 0.75960094, 0.88857091, 0.82004165, 0.84701496, 0.94663644,\n 0.92739644, 0.93956552, 0.81951116, 0.85478557, 0.92620105,\n 0.73197654, 0.92144967, 0.91454354, 0.78410084, 0.86648562,\n 0.86408229, 0.76037249, 0.78192514, 0.84545403, 0.82472248]]) z
(chain, draw)
float64
0.4879 0.4997 ... 0.4699 0.5145
array([[0.48793357, 0.49967852, 0.48097186, 0.53108984, 0.49678664,\n 0.49744167, 0.52213647, 0.51380724, 0.48929812, 0.50171242,\n 0.50807177, 0.48628078, 0.46771342, 0.52959039, 0.53736978,\n 0.47967018, 0.48740379, 0.50545674, 0.50606103, 0.49495431,\n 0.49810731, 0.49951503, 0.50490618, 0.48962975, 0.50591468,\n 0.51196065, 0.49876528, 0.50093489, 0.49712869, 0.50624838,\n 0.5050166 , 0.49539939, 0.48047467, 0.49734438, 0.48639449,\n 0.49503224, 0.48237508, 0.5103302 , 0.49643521, 0.49794998,\n 0.50543506, 0.50605143, 0.48366647, 0.49078947, 0.49432243,\n 0.5098502 , 0.5047392 , 0.48235156, 0.50533222, 0.50368641,\n 0.49037254, 0.49575325, 0.49551749, 0.51194524, 0.52304424,\n 0.50225696, 0.48408367, 0.49008342, 0.50349641, 0.505484 ,\n 0.50090937, 0.50333918, 0.50229001, 0.51301869, 0.49129331,\n 0.49129331, 0.51075397, 0.49376643, 0.49297369, 0.52132877,\n 0.4989994 , 0.49265556, 0.515693 , 0.49500711, 0.49341592,\n 0.4993158 , 0.51598565, 0.49942951, 0.48918374, 0.50260612,\n 0.50936012, 0.47236434, 0.51502862, 0.51226401, 0.51244568,\n 0.50270365, 0.51003543, 0.49025671, 0.49206047, 0.48906362,\n 0.4976392 , 0.49669684, 0.50057352, 0.50374138, 0.47829983,\n 0.52435592, 0.51098453, 0.51104457, 0.49173874, 0.48882439,\n...\n 0.49836444, 0.49786753, 0.50530067, 0.52145878, 0.49432409,\n 0.52560344, 0.51841541, 0.49535198, 0.49871608, 0.50706148,\n 0.50841435, 0.48473043, 0.47966263, 0.50244304, 0.50958603,\n 0.52715335, 0.49378863, 0.49378863, 0.49746739, 0.49746739,\n 0.50260082, 0.50651844, 0.5052582 , 0.49741282, 0.49050137,\n 0.5282347 , 0.51924281, 0.48782253, 0.49700215, 0.49474879,\n 0.49334079, 0.49876032, 0.48803508, 0.50538689, 0.48990384,\n 0.50773207, 0.51374517, 0.49117368, 0.49117368, 0.49419916,\n 0.50444418, 0.500878 , 0.50313953, 0.50962648, 0.49165823,\n 0.49732927, 0.5035249 , 0.51792317, 0.50101674, 0.4973693 ,\n 0.50613025, 0.5011646 , 0.48979645, 0.50144033, 0.49014401,\n 0.49189512, 0.50401425, 0.4756525 , 0.48085838, 0.50687202,\n 
0.50805201, 0.49211101, 0.49179423, 0.51203422, 0.50992392,\n 0.51406267, 0.5098954 , 0.5058715 , 0.51179853, 0.50888893,\n 0.4654464 , 0.49781324, 0.50056263, 0.49970487, 0.5026967 ,\n 0.50556082, 0.49306406, 0.49254181, 0.47476832, 0.51735156,\n 0.51367483, 0.52249344, 0.51460332, 0.47955772, 0.51116876,\n 0.50580645, 0.50237354, 0.49622703, 0.49780105, 0.45939825,\n 0.48722186, 0.49744381, 0.50171777, 0.49156675, 0.49321686,\n 0.51307053, 0.49563626, 0.5083985 , 0.46994839, 0.51450246]]) v_y
(chain, draw)
float64
0.2902 0.3163 ... 0.2837 0.2524
array([[0.29017422, 0.31634267, 0.2942563 , 0.21629518, 0.31034113,\n 0.20711809, 0.32621438, 0.26260628, 0.28101476, 0.25256638,\n 0.25934615, 0.29154261, 0.30881967, 0.24955637, 0.22944398,\n 0.30668374, 0.30954961, 0.28787639, 0.22065215, 0.28677964,\n 0.24715871, 0.2450021 , 0.33223632, 0.29666575, 0.31587413,\n 0.36523771, 0.29983965, 0.28653943, 0.27948597, 0.29262438,\n 0.29411822, 0.24108595, 0.29436102, 0.39809497, 0.36816282,\n 0.37968063, 0.32600763, 0.28548601, 0.23660427, 0.21794168,\n 0.29932178, 0.26995017, 0.26663419, 0.26602792, 0.15257248,\n 0.22081287, 0.30215959, 0.25003201, 0.21151757, 0.25873215,\n 0.30619321, 0.30921297, 0.33589868, 0.20345808, 0.20029464,\n 0.18574259, 0.28290351, 0.19951222, 0.36297506, 0.28001125,\n 0.25443351, 0.24951994, 0.27930735, 0.28925458, 0.2697804 ,\n 0.2697804 , 0.27108651, 0.21550766, 0.22454648, 0.29978819,\n 0.21197959, 0.28119096, 0.25406235, 0.24536572, 0.26321674,\n 0.32394694, 0.23551021, 0.26901651, 0.30505569, 0.32549175,\n 0.21976427, 0.24993848, 0.33069586, 0.32537588, 0.30757335,\n 0.19225521, 0.22941674, 0.21732537, 0.17329308, 0.16304591,\n 0.24773196, 0.32570466, 0.33095861, 0.28486927, 0.30064662,\n 0.26483182, 0.2962886 , 0.36672065, 0.3459258 , 0.18645811,\n...\n 0.24594648, 0.27644235, 0.24773065, 0.25941413, 0.22772718,\n 0.26669591, 0.25407115, 0.26460934, 0.26004812, 0.26885782,\n 0.28205356, 0.27275135, 0.29642237, 0.32992261, 0.25675461,\n 0.22315518, 0.28939842, 0.28939842, 0.31071653, 0.31071653,\n 0.26829765, 0.23788815, 0.27016431, 0.24758315, 0.28872393,\n 0.23870994, 0.22734983, 0.25749483, 0.2246537 , 0.28044976,\n 0.2528612 , 0.28657881, 0.26783476, 0.2734338 , 0.2717309 ,\n 0.1530987 , 0.32219967, 0.18836714, 0.18836714, 0.25413059,\n 0.28938615, 0.25387767, 0.28706335, 0.29485522, 0.26731664,\n 0.30196498, 0.28860952, 0.22991785, 0.30448908, 0.32293909,\n 0.26729461, 0.2788672 , 0.29401603, 0.31155997, 0.3104029 ,\n 0.33856865, 0.23689423, 0.31204555, 0.31653734, 0.26621382,\n 
0.26676083, 0.32313996, 0.21954126, 0.23851233, 0.26744598,\n 0.33869598, 0.32833688, 0.30394758, 0.20439067, 0.32165122,\n 0.23801759, 0.31423444, 0.31035577, 0.24399906, 0.26722865,\n 0.36226421, 0.28090412, 0.29821189, 0.32371105, 0.17387639,\n 0.3611812 , 0.13617164, 0.39616812, 0.22207321, 0.26251743,\n 0.28704543, 0.31179994, 0.24029406, 0.31564952, 0.3150068 ,\n 0.30478641, 0.29148262, 0.27750037, 0.29620782, 0.224372 ,\n 0.27171446, 0.26218865, 0.25599688, 0.28373982, 0.25242507]]) a
(chain, draw)
float64
1.514 1.503 1.529 ... 1.477 1.471
array([[1.51400418, 1.50322753, 1.52857363, 1.50837306, 1.44683951,\n 1.45394571, 1.47416138, 1.51746144, 1.44311282, 1.50755773,\n 1.50066412, 1.48173732, 1.45546682, 1.49014253, 1.52100204,\n 1.41613583, 1.4674459 , 1.47390728, 1.46677935, 1.50267892,\n 1.46798012, 1.46641566, 1.45319187, 1.45585789, 1.45170435,\n 1.47103584, 1.48887584, 1.51279227, 1.47378476, 1.47261684,\n 1.47955021, 1.48279251, 1.53594945, 1.51336522, 1.55598672,\n 1.56782094, 1.48773737, 1.44012273, 1.48520091, 1.53399161,\n 1.52110888, 1.53482876, 1.52810328, 1.48638522, 1.52708596,\n 1.47974797, 1.46540422, 1.48482259, 1.50466258, 1.48236905,\n 1.42957812, 1.42537967, 1.43300134, 1.53613906, 1.47441954,\n 1.50787525, 1.42341563, 1.47695996, 1.46146447, 1.47013235,\n 1.49103817, 1.46558268, 1.47969287, 1.48663201, 1.47766745,\n 1.47766745, 1.46431007, 1.51972856, 1.51322787, 1.48666808,\n 1.47464552, 1.46833378, 1.47154514, 1.42121913, 1.40448536,\n 1.49418469, 1.47628743, 1.51123866, 1.54382027, 1.53747081,\n 1.45206867, 1.4626564 , 1.49031317, 1.48207642, 1.50389723,\n 1.49002013, 1.48851795, 1.43900484, 1.48563444, 1.49315442,\n 1.50350229, 1.51864873, 1.53731047, 1.47961998, 1.51374263,\n 1.45170607, 1.45130244, 1.476734 , 1.48419311, 1.46717676,\n...\n 1.49821648, 1.45034999, 1.46633287, 1.51779129, 1.43709592,\n 1.51396522, 1.50350047, 1.4626857 , 1.44732575, 1.42042009,\n 1.494075 , 1.44645201, 1.45365875, 1.48998387, 1.4814314 ,\n 1.47260284, 1.48204142, 1.48204142, 1.44617981, 1.44617981,\n 1.51229937, 1.48413876, 1.48109073, 1.51358078, 1.47063296,\n 1.46231905, 1.45798685, 1.48671655, 1.50966961, 1.46222842,\n 1.53618504, 1.44032146, 1.43878229, 1.46222403, 1.48678255,\n 1.47047006, 1.47396577, 1.46505899, 1.46505899, 1.49365707,\n 1.4699265 , 1.47088857, 1.49176094, 1.47057836, 1.49200719,\n 1.48264218, 1.47080297, 1.50509075, 1.44798084, 1.48137008,\n 1.41670304, 1.46097117, 1.47331495, 1.51677088, 1.48715666,\n 1.50347795, 1.42507791, 1.48949415, 1.45122816, 1.49123793,\n 
1.4865125 , 1.46545836, 1.46535094, 1.46794441, 1.46529231,\n 1.46935117, 1.47936183, 1.45978012, 1.45358503, 1.47178064,\n 1.47864506, 1.51231759, 1.51205315, 1.44342324, 1.47649997,\n 1.47639823, 1.4942417 , 1.46318844, 1.49067514, 1.5211519 ,\n 1.40755717, 1.50572543, 1.49126262, 1.53397265, 1.47872111,\n 1.4955524 , 1.50183558, 1.46147211, 1.47414351, 1.48653921,\n 1.48878563, 1.51436992, 1.52703956, 1.44742495, 1.52281136,\n 1.51107446, 1.46842941, 1.46416066, 1.47703907, 1.4709202 ]]) t
(chain, draw)
float64
0.1076 0.08356 ... 0.1176 0.1185
array([[0.10761804, 0.08356381, 0.08614957, 0.09233716, 0.13723529,\n 0.13634991, 0.13388109, 0.12511015, 0.13567446, 0.10455672,\n 0.12393897, 0.12275025, 0.12365915, 0.11519734, 0.11714997,\n 0.14315585, 0.14213362, 0.12416233, 0.12132475, 0.10083494,\n 0.10619329, 0.12359021, 0.14866404, 0.14572634, 0.13956068,\n 0.14414991, 0.12760335, 0.1249362 , 0.13391754, 0.11972567,\n 0.12215321, 0.11638073, 0.08892592, 0.0910824 , 0.0989878 ,\n 0.09151818, 0.0992311 , 0.1314375 , 0.08377884, 0.10576299,\n 0.07068413, 0.07392324, 0.08414513, 0.07901942, 0.07799246,\n 0.11977701, 0.12762887, 0.10363169, 0.09579083, 0.12362506,\n 0.15014695, 0.13196477, 0.16113445, 0.11647077, 0.1032688 ,\n 0.12702943, 0.16118922, 0.09046228, 0.11471169, 0.12962771,\n 0.11915477, 0.1293102 , 0.1341493 , 0.12063856, 0.13534065,\n 0.13534065, 0.11679947, 0.09520892, 0.1007222 , 0.11667787,\n 0.12398423, 0.13742443, 0.14708596, 0.12491343, 0.12132005,\n 0.11060289, 0.16062694, 0.07619101, 0.10014439, 0.09760127,\n 0.13073326, 0.12044282, 0.11443075, 0.10993101, 0.12201642,\n 0.12183641, 0.14335642, 0.12588039, 0.11698529, 0.11471395,\n 0.10906027, 0.10341983, 0.10164342, 0.12394127, 0.10242053,\n 0.15380877, 0.14383382, 0.13418067, 0.12135504, 0.10406777,\n...\n 0.1180265 , 0.1220359 , 0.1483663 , 0.11731828, 0.14782378,\n 0.10923994, 0.11117522, 0.14682319, 0.107591 , 0.13718418,\n 0.11589032, 0.12037488, 0.12607763, 0.10969267, 0.12273298,\n 0.14066653, 0.11307823, 0.11307823, 0.13090011, 0.13090011,\n 0.09637677, 0.11012864, 0.12250937, 0.11712368, 0.11311905,\n 0.12340101, 0.14884929, 0.1096291 , 0.10851559, 0.13538341,\n 0.10291837, 0.14659764, 0.12904532, 0.13606136, 0.10306971,\n 0.11602825, 0.13080305, 0.12925459, 0.12925459, 0.11167242,\n 0.12353874, 0.14544714, 0.12950392, 0.09615143, 0.12388212,\n 0.13002152, 0.12787945, 0.12007151, 0.12050301, 0.1430875 ,\n 0.14294198, 0.15165973, 0.12514079, 0.12893165, 0.10285508,\n 0.11272578, 0.12050938, 0.15011385, 0.11004251, 0.14430113,\n 
0.11398997, 0.12446459, 0.13003922, 0.12568369, 0.11898147,\n 0.11520509, 0.15158596, 0.10546542, 0.11407009, 0.11244434,\n 0.08630224, 0.10121852, 0.11008141, 0.12835788, 0.1392386 ,\n 0.13190289, 0.12424361, 0.12400238, 0.09793804, 0.08486731,\n 0.13039168, 0.11088056, 0.12130127, 0.08869407, 0.13872184,\n 0.10087323, 0.11383646, 0.12695471, 0.13920156, 0.10674212,\n 0.10907245, 0.12105388, 0.10773205, 0.12797064, 0.11106518,\n 0.12029273, 0.11402632, 0.14047405, 0.11757579, 0.11854279]]) Attributes: (8)
created_at : 2026-01-08T04:58:29.481075+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 49.984930753707886 tuning_steps : 500 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n log_likelihood \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 8MB\nDimensions: (chain: 2, draw: 500, __obs__: 1000)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 ... 493 494 495 496 497 498 499\n * __obs__ (__obs__) int64 8kB 0 1 2 3 4 5 6 ... 994 995 996 997 998 999\nData variables:\n rt,response (chain, draw, __obs__) float64 8MB -2.314 -2.193 ... -1.697\nAttributes:\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 \n \n
\n \n \n \n \n sample_stats \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 134kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 ... 495 496 497 498 499\nData variables: (12/18)\n divergences (chain, draw) int64 8kB 0 0 0 0 0 0 0 ... 0 0 0 0 0 0\n step_size_bar (chain, draw) float64 8kB 0.594 0.594 ... 0.5709\n smallest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n acceptance_rate (chain, draw) float64 8kB 0.7942 0.6379 ... 0.9569\n perf_counter_diff (chain, draw) float64 8kB 0.02535 0.02519 ... 0.02226\n n_steps (chain, draw) float64 8kB 7.0 7.0 7.0 ... 7.0 7.0 7.0\n ... ...\n diverging (chain, draw) bool 1kB False False ... False False\n energy_error (chain, draw) float64 8kB 0.2536 0.2799 ... -0.03638\n largest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n tree_depth (chain, draw) int64 8kB 3 3 3 3 3 3 3 ... 3 3 3 3 3 3\n max_energy_error (chain, draw) float64 8kB 0.5126 0.8966 ... 0.1649\n energy (chain, draw) float64 8kB 1.998e+03 ... 2e+03\nAttributes:\n created_at: 2026-01-08T04:58:29.498781+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 49.984930753707886\n tuning_steps: 500\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (18)
divergences
(chain, draw)
int64
0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0
array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n...\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) step_size_bar
(chain, draw)
float64
0.594 0.594 0.594 ... 0.5709 0.5709
array([[0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n 0.59398099, 0.59398099, 0.59398099, 0.59398099, 0.59398099,\n...\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 
0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208,\n 0.57085208, 0.57085208, 0.57085208, 0.57085208, 0.57085208]]) smallest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) acceptance_rate
(chain, draw)
float64
0.7942 0.6379 ... 0.7862 0.9569
array([[0.79424946, 0.63787081, 0.90830802, 0.9397147 , 0.97809984,\n 0.90623638, 0.99092039, 0.83944667, 0.93270912, 0.91808656,\n 0.86776189, 0.87613027, 0.77011506, 0.99875531, 0.93016917,\n 0.99113428, 0.98566243, 1. , 0.66787903, 0.90533648,\n 0.68592557, 1. , 0.9539135 , 0.86360511, 0.89199648,\n 0.58547984, 0.87949389, 0.76450531, 1. , 0.86851078,\n 1. , 0.75442727, 0.81908396, 0.48965278, 0.73453028,\n 1. , 1. , 0.59956934, 1. , 0.85541565,\n 0.78046864, 0.85739439, 0.58185834, 1. , 0.89966816,\n 0.96111581, 0.93786371, 1. , 0.83912959, 0.98235967,\n 0.65950949, 0.72803856, 0.7268331 , 0.70045295, 1. ,\n 0.79553051, 0.97288042, 0.34399072, 0.96755927, 0.96992109,\n 0.76382369, 0.74976626, 0.95330033, 0.83632793, 0.95277054,\n 0.54217602, 0.94942817, 0.91203564, 0.96022007, 0.85313926,\n 0.98835287, 0.87155482, 0.9620569 , 0.90419926, 0.70916437,\n 0.85253224, 0.29003768, 1. , 1. , 0.70729728,\n 0.76456242, 0.83754822, 1. , 0.91979576, 0.97931794,\n 0.8876915 , 0.90250023, 0.98545974, 0.92851072, 0.95326269,\n 0.98262908, 0.98655592, 0.75358124, 0.81666245, 0.98272395,\n 0.9677211 , 0.65451211, 0.96523288, 0.9086867 , 0.99641008,\n...\n 0.96872286, 0.98328783, 0.627855 , 0.94852288, 0.74323269,\n 0.73473108, 1. , 0.91991556, 0.83807164, 0.86047917,\n 0.81481313, 0.99626864, 0.98328779, 0.99537578, 0.89334391,\n 0.86797085, 0.9964621 , 0.61591328, 0.64623891, 0.02891636,\n 0.99190683, 0.87882557, 0.97781044, 0.68657668, 0.80882987,\n 0.77352423, 1. , 0.30469728, 0.77071565, 0.83454588,\n 0.66955954, 0.98639467, 0.75006808, 1. , 0.9348318 ,\n 0.76126579, 0.83645879, 0.93271664, 0.4629259 , 0.43372062,\n 0.62628008, 0.9303303 , 0.53409769, 0.62332237, 0.75822033,\n 0.96734822, 0.69516906, 0.67321654, 1. , 0.99050837,\n 0.96329954, 0.88014403, 0.98590582, 0.669961 , 0.86285119,\n 0.79282557, 0.86131786, 0.73264466, 0.81912461, 0.97863835,\n 1. 
, 0.89998913, 0.39431458, 0.87893208, 0.8099056 ,\n 0.62753254, 0.87306376, 0.7334853 , 0.9444276 , 0.99053464,\n 0.93379753, 1. , 0.86571053, 0.95203518, 0.71486276,\n 0.76371657, 0.98652744, 0.98690059, 0.63662037, 0.78558662,\n 0.88008889, 0.92759936, 0.92593205, 0.88272832, 0.59217123,\n 0.57426005, 0.96888612, 0.98353666, 0.88881784, 0.87278833,\n 0.98586852, 0.55055126, 0.99486084, 0.96596542, 0.89850141,\n 0.83502983, 1. , 0.76140931, 0.78622672, 0.95686981]]) perf_counter_diff
(chain, draw)
float64
0.02535 0.02519 ... 0.02295 0.02226
array([[0.025347 , 0.02518724, 0.02592335, 0.02693033, 0.0268955 ,\n 0.02724509, 0.02689668, 0.02641207, 0.02683135, 0.0249227 ,\n 0.01257306, 0.02622019, 0.0263378 , 0.02632399, 0.01329286,\n 0.02645154, 0.01906267, 0.0310496 , 0.01254658, 0.02549785,\n 0.02527454, 0.025397 , 0.02500911, 0.02666133, 0.01302057,\n 0.01306755, 0.02923459, 0.01605233, 0.01337244, 0.02742618,\n 0.01337679, 0.02642957, 0.02729643, 0.03466279, 0.02953638,\n 0.0168396 , 0.02686404, 0.02627087, 0.02682562, 0.0261888 ,\n 0.02566658, 0.01264723, 0.02640904, 0.01308972, 0.02621884,\n 0.02478855, 0.02643338, 0.02448448, 0.02582451, 0.02650033,\n 0.02503164, 0.01256801, 0.01284376, 0.02610608, 0.01335386,\n 0.01322916, 0.02636484, 0.02571132, 0.02584893, 0.02493199,\n 0.0252865 , 0.0247739 , 0.01197984, 0.01273314, 0.02597552,\n 0.03217296, 0.0267567 , 0.02495133, 0.01272931, 0.02584989,\n 0.02655493, 0.01491651, 0.03295751, 0.05146933, 0.02509735,\n 0.02632012, 0.02740659, 0.02640527, 0.02600404, 0.01348846,\n 0.02541057, 0.02642171, 0.02697821, 0.01364237, 0.01373881,\n 0.02718246, 0.02767088, 0.02637063, 0.02631202, 0.01298465,\n 0.025124 , 0.02549219, 0.01196263, 0.02475345, 0.02615051,\n 0.02630154, 0.01355715, 0.02594161, 0.02595703, 0.02615023,\n...\n 0.02879996, 0.02662276, 0.01387162, 0.02771733, 0.05222714,\n 0.02693961, 0.01321148, 0.03188938, 0.01275427, 0.02620294,\n 0.02674361, 0.0272643 , 0.02724496, 0.02781843, 0.01366068,\n 0.02631499, 0.02668425, 0.02534692, 0.01316415, 0.02670722,\n 0.02515669, 0.028861 , 0.01877823, 0.02687393, 0.02588512,\n 0.02738013, 0.01373545, 0.02716203, 0.02600836, 0.02588837,\n 0.02665201, 0.01827759, 0.00904789, 0.02850113, 0.01861042,\n 0.01849631, 0.01917261, 0.01890433, 0.01965714, 0.01917594,\n 0.01958168, 0.01912759, 0.01912234, 0.01018035, 0.01008435,\n 0.01939092, 0.00943305, 0.01981764, 0.01914476, 0.01834781,\n 0.01859687, 0.00936021, 0.01889758, 0.01959491, 0.01895993,\n 0.00968627, 0.01959454, 0.03921376, 0.01033098, 0.02046217,\n 
0.00995325, 0.00981963, 0.02000039, 0.02044226, 0.01002708,\n 0.01039915, 0.01957397, 0.01019804, 0.01985919, 0.01969863,\n 0.02071924, 0.02005741, 0.01962814, 0.01391399, 0.00939561,\n 0.01927151, 0.01985299, 0.00978019, 0.04073355, 0.02043081,\n 0.02015535, 0.02042139, 0.02006632, 0.03023174, 0.0393309 ,\n 0.00993495, 0.01948006, 0.01948799, 0.01925847, 0.02021637,\n 0.01982974, 0.03906148, 0.01927566, 0.01889922, 0.01929197,\n 0.02022661, 0.02070874, 0.02216651, 0.02295377, 0.02226243]]) n_steps
(chain, draw)
float64
7.0 7.0 7.0 7.0 ... 7.0 7.0 7.0 7.0
array([[ 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 3., 7., 7.,\n 7., 3., 7., 3., 7., 3., 7., 7., 7., 7., 7., 3., 3.,\n 7., 3., 3., 7., 3., 7., 7., 7., 7., 3., 7., 7., 7.,\n 7., 7., 3., 7., 3., 7., 7., 7., 7., 7., 7., 7., 3.,\n 3., 7., 3., 3., 7., 7., 7., 7., 7., 7., 3., 3., 7.,\n 7., 7., 7., 3., 7., 7., 3., 7., 15., 7., 7., 7., 7.,\n 7., 3., 7., 7., 7., 3., 3., 7., 7., 7., 7., 3., 7.,\n 7., 3., 7., 7., 7., 3., 7., 7., 7., 3., 7., 7., 7.,\n 7., 3., 3., 7., 3., 3., 7., 7., 7., 7., 3., 3., 7.,\n 7., 7., 7., 7., 7., 3., 7., 3., 7., 7., 3., 7., 7.,\n 7., 7., 7., 7., 3., 3., 7., 3., 7., 7., 7., 7., 3.,\n 7., 7., 7., 7., 7., 3., 7., 7., 7., 7., 3., 3., 7.,\n 7., 3., 7., 7., 7., 7., 3., 7., 3., 7., 7., 7., 7.,\n 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 3., 7., 7.,\n 7., 3., 7., 7., 7., 7., 7., 3., 7., 3., 7., 7., 3.,\n 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 1., 7., 7.,\n 7., 7., 7., 7., 3., 3., 7., 7., 3., 15., 3., 7., 3.,\n 7., 3., 7., 3., 7., 7., 7., 7., 7., 7., 3., 7., 7.,\n 3., 7., 7., 3., 7., 7., 7., 7., 7., 3., 7., 7., 7.,\n 7., 7., 7., 7., 7., 7., 7., 7., 7., 3., 7., 7., 7.,\n...\n 7., 7., 3., 7., 7., 3., 7., 7., 7., 7., 7., 7., 7.,\n 7., 7., 3., 7., 7., 7., 7., 7., 3., 7., 7., 7., 7.,\n 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7.,\n 7., 7., 7., 7., 3., 7., 3., 3., 7., 7., 7., 7., 3.,\n 3., 7., 3., 7., 7., 3., 7., 7., 7., 7., 7., 7., 3.,\n 3., 7., 7., 7., 7., 7., 7., 3., 7., 7., 7., 7., 7.,\n 7., 7., 7., 7., 7., 7., 7., 7., 3., 7., 3., 7., 3.,\n 7., 3., 3., 7., 3., 7., 7., 3., 3., 3., 7., 7., 7.,\n 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 3., 3.,\n 7., 7., 7., 7., 3., 3., 7., 7., 7., 7., 3., 7., 7.,\n 15., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7., 7.,\n 7., 7., 7., 7., 7., 3., 7., 3., 7., 3., 7., 7., 3.,\n 7., 15., 7., 3., 7., 3., 7., 7., 7., 7., 7., 3., 7.,\n 7., 7., 3., 7., 7., 7., 3., 7., 7., 7., 3., 7., 7.,\n 7., 7., 7., 3., 11., 7., 7., 7., 7., 7., 7., 7., 7.,\n 7., 3., 3., 7., 3., 7., 7., 7., 7., 3., 7., 7., 7.,\n 3., 7., 15., 3., 7., 3., 3., 7., 7., 
3., 3., 7., 3.,\n 7., 7., 7., 7., 7., 5., 3., 7., 7., 3., 15., 7., 7.,\n 7., 7., 11., 15., 3., 7., 7., 7., 7., 7., 15., 7., 7.,\n 7., 7., 7., 7., 7., 7.]]) process_time_diff
(chain, draw)
float64
0.02535 0.02515 ... 0.0229 0.0222
array([[0.02534815, 0.02515372, 0.02592396, 0.02693145, 0.02689603,\n 0.02724618, 0.02687579, 0.02641276, 0.02683203, 0.02492329,\n 0.01257468, 0.02620787, 0.02633866, 0.02632449, 0.01327334,\n 0.0264525 , 0.01906411, 0.03103293, 0.01254763, 0.02549949,\n 0.02527495, 0.02539862, 0.02499977, 0.02666292, 0.01302244,\n 0.01306975, 0.02923605, 0.01605286, 0.01337351, 0.02741718,\n 0.01337827, 0.02642925, 0.02729738, 0.03466393, 0.02953747,\n 0.01684158, 0.02686525, 0.02623354, 0.02682671, 0.0261509 ,\n 0.02566817, 0.01264963, 0.02638326, 0.01307898, 0.02615848,\n 0.02478947, 0.02643431, 0.02447328, 0.02579918, 0.02645085,\n 0.02503302, 0.0125474 , 0.01282416, 0.02610703, 0.01335529,\n 0.01320214, 0.02634065, 0.02566357, 0.02581466, 0.02493337,\n 0.02520108, 0.02477522, 0.0119828 , 0.0127343 , 0.02597635,\n 0.03217401, 0.02675749, 0.02495226, 0.01273091, 0.02585065,\n 0.02655685, 0.01491998, 0.03295897, 0.05146957, 0.02509858,\n 0.02632142, 0.02740738, 0.02640585, 0.02600494, 0.01349041,\n 0.02541141, 0.02642313, 0.0269788 , 0.01364346, 0.01373958,\n 0.02718367, 0.02767182, 0.02636141, 0.02631331, 0.01298625,\n 0.02512317, 0.02549319, 0.01196375, 0.02475501, 0.02615115,\n 0.02630265, 0.01355824, 0.02594288, 0.02593024, 0.02614505,\n...\n 0.02876538, 0.02662386, 0.01384411, 0.0277181 , 0.05222688,\n 0.02690707, 0.01321285, 0.03189052, 0.01275624, 0.02609929,\n 0.02674509, 0.02723481, 0.02717362, 0.02781944, 0.01363123,\n 0.02631554, 0.02668555, 0.02529329, 0.01313305, 0.02672107,\n 0.02510807, 0.02883853, 0.0187581 , 0.02687606, 0.02584115,\n 0.02736777, 0.01369967, 0.02708163, 0.02595832, 0.02583385,\n 0.02654922, 0.01827758, 0.0090491 , 0.0284309 , 0.01858572,\n 0.01849055, 0.01913912, 0.01889052, 0.01962157, 0.01915291,\n 0.01958261, 0.01912861, 0.01912336, 0.01015349, 0.01005827,\n 0.0193849 , 0.00943438, 0.0197864 , 0.01912676, 0.01834215,\n 0.01859778, 0.00936169, 0.01889844, 0.01956794, 0.01896078,\n 0.00966013, 0.01959547, 0.03912999, 0.01030701, 0.02042664,\n 
0.0099259 , 0.00982129, 0.01996506, 0.02039773, 0.01000614,\n 0.0104001 , 0.01955862, 0.01017029, 0.0198217 , 0.01966941,\n 0.02070128, 0.02003576, 0.01960036, 0.01388472, 0.00938161,\n 0.01920572, 0.01980807, 0.00976559, 0.04071311, 0.02042336,\n 0.02015634, 0.02038997, 0.02006785, 0.03022164, 0.03926109,\n 0.00993568, 0.01948131, 0.01945955, 0.01926481, 0.02019754,\n 0.0198311 , 0.03897204, 0.01927609, 0.0188998 , 0.01927829,\n 0.02022737, 0.020696 , 0.02216002, 0.02289563, 0.02220184]]) lp
(chain, draw)
float64
-1.996e+03 ... -1.995e+03
array([[-1996.08355774, -1997.67687544, -1998.29888124, -2002.02526684,\n -1997.40710775, -1997.05312845, -1997.38496381, -1996.29853057,\n -1995.57843875, -1995.83031274, -1995.07106656, -1996.19759078,\n -2001.63592633, -1999.46866297, -2000.82175244, -2000.27647128,\n -1996.88771178, -1994.17650739, -1994.79779652, -1995.3280404 ,\n -1998.00151417, -1995.9413922 , -1998.11468537, -1997.51819128,\n -1997.64945998, -1998.83297173, -1996.68685036, -2000.57740763,\n -1995.88129335, -1998.31875543, -1997.95765405, -1998.17020531,\n -1998.95915417, -2001.02877918, -2001.18747371, -2002.13208718,\n -1996.52128929, -1998.08052999, -1997.56002301, -1997.07759811,\n -1999.23062675, -1998.36251022, -1999.93859951, -1998.81820303,\n -2001.51228641, -1995.31847541, -1995.72787699, -1995.56061393,\n -1997.21994287, -1996.06948451, -1997.36133146, -1997.44609673,\n -1998.85922622, -2000.00917238, -1999.10614675, -1998.58889207,\n -1999.16221405, -1999.54338436, -1998.07908148, -1994.44495752,\n -1994.22458065, -1995.06501795, -1994.27137575, -1996.77469654,\n -1995.37469235, -1995.37469235, -1995.51379958, -1997.08177184,\n -1996.34534017, -1996.99936846, -1994.9451252 , -1994.90901679,\n -1996.17339276, -1997.87564419, -2000.94555439, -1997.36844964,\n -2001.00644905, -1999.47683687, -1999.28028619, -1999.34730155,\n...\n -1996.97194581, -1995.56440152, -1994.01429296, -1995.85952684,\n -1998.08280234, -1998.64039616, -1998.08158868, -1995.65990305,\n -1997.02970934, -1995.75107201, -1998.38511818, -1996.16938103,\n -1996.64204823, -1995.14877409, -1995.60622365, -1998.18885176,\n -1995.82263231, -1996.60796838, -1996.60796838, -1997.63545555,\n -1998.89915997, -1996.74064994, -1994.78890789, -1998.14605601,\n -1998.48483284, -1994.64529017, -1995.898444 , -2000.67403652,\n -1997.34040733, -1996.90235852, -1997.37676015, -1995.49609716,\n -1995.32172464, -1997.0755377 , -1995.51281838, -1996.78723552,\n -1997.78611031, -2001.55902033, -1997.86261479, -1996.03682286,\n 
-1994.84767826, -1995.9369665 , -1995.76784763, -1995.05206664,\n -1994.85206362, -1998.53350387, -1997.57285636, -1997.16621089,\n -1997.11100623, -1997.06758159, -2000.90900456, -1995.66957644,\n -1995.99937312, -1995.02511361, -1995.41547284, -1998.56916374,\n -1994.55156343, -1995.18693108, -1997.69976642, -2000.84484828,\n -2002.89713548, -2002.50440827, -1999.08991384, -1998.16889364,\n -1999.35901339, -1998.70811866, -1999.04573832, -1994.37688529,\n -1996.0012955 , -2002.81733295, -1998.04820787, -1998.99939172,\n -1997.30950326, -1995.41239608, -1996.41155352, -1995.71655706,\n -1995.09435436, -1994.74015636, -1997.94552091, -1995.39322019]]) perf_counter_start
(chain, draw)
float64
2.618e+03 2.618e+03 ... 2.633e+03
array([[2617.76568572, 2617.79134283, 2617.81682013, 2617.84298204,\n 2617.8701868 , 2617.89733027, 2617.92485033, 2617.95199504,\n 2617.97865781, 2618.00572702, 2618.03088657, 2618.04370122,\n 2618.07018349, 2618.09677573, 2618.12334065, 2618.13687092,\n 2618.16355541, 2618.182946 , 2618.2142292 , 2618.22702289,\n 2618.25276803, 2618.27829874, 2618.30396137, 2618.32921707,\n 2618.35613738, 2618.36939932, 2618.38275024, 2618.41233843,\n 2618.42874876, 2618.44235808, 2618.47006251, 2618.48377082,\n 2618.51042922, 2618.53797211, 2618.57297237, 2618.60283905,\n 2618.61993155, 2618.64705025, 2618.67355596, 2618.70064838,\n 2618.72708623, 2618.75299769, 2618.76595259, 2618.79257759,\n 2618.80590709, 2618.83236716, 2618.85749863, 2618.8841969 ,\n 2618.90891741, 2618.93497611, 2618.96170827, 2618.98698098,\n 2618.99979908, 2619.01287766, 2619.03923859, 2619.05283606,\n 2619.06634106, 2619.09294706, 2619.11890153, 2619.14498957,\n 2619.17018842, 2619.19571156, 2619.22072043, 2619.23298038,\n 2619.2459437 , 2619.27216765, 2619.30466534, 2619.33167249,\n 2619.35685725, 2619.36982407, 2619.39591029, 2619.42270655,\n 2619.43792289, 2619.4711463 , 2619.52287181, 2619.54824337,\n 2619.57489661, 2619.60254589, 2619.62919754, 2619.65544103,\n...\n 2630.08642979, 2630.11182269, 2630.14101227, 2630.16012065,\n 2630.1873128 , 2630.2134259 , 2630.2410637 , 2630.2550408 ,\n 2630.28243819, 2630.3086969 , 2630.33482167, 2631.27025109,\n 2631.28872358, 2631.29798755, 2631.32668216, 2631.34549972,\n 2631.36420544, 2631.38358826, 2631.40269503, 2631.4225456 ,\n 2631.44192688, 2631.46169644, 2631.4810145 , 2631.50038173,\n 2631.51075326, 2631.52104065, 2631.54065066, 2631.55030555,\n 2631.57032344, 2631.58966486, 2631.60825503, 2631.62704905,\n 2631.63662139, 2631.65572297, 2631.67551409, 2631.69466275,\n 2631.704539 , 2631.72432196, 2631.76374872, 2631.77427362,\n 2631.7949591 , 2631.80511491, 2631.81516987, 2631.83537037,\n 2631.85602649, 2631.86629338, 2631.87690219, 2631.89668654,\n 
2631.90710749, 2631.92721509, 2631.94716442, 2631.96809121,\n 2631.98834707, 2632.00820547, 2632.02232227, 2632.03193201,\n 2632.0514091 , 2632.07146386, 2632.08144734, 2632.12238562,\n 2632.14302254, 2632.16338309, 2632.18401791, 2632.20428424,\n 2632.23471757, 2632.27427684, 2632.28440703, 2632.30410907,\n 2632.32379534, 2632.34327495, 2632.36369491, 2632.38375138,\n 2632.42302852, 2632.44249743, 2632.46158007, 2632.48108533,\n 2632.50152066, 2632.52243422, 2632.54487767, 2632.56807225]]) index_in_trajectory
(chain, draw)
int64
4 -1 3 -3 4 3 ... 4 -2 -3 -2 -4 4
array([[ 4, -1, 3, -3, 4, 3, -3, 2, -5, -5, -1, -5, 3,\n -5, -2, 5, -2, -2, 3, 2, -7, -1, 2, 1, -2, -2,\n 2, 2, -2, 2, -1, 5, -2, -3, 1, -1, 2, 7, 6,\n -3, -3, -3, -2, -2, -2, -2, -4, -6, 2, -3, 3, -1,\n 2, -5, -2, -1, 4, 2, 6, 1, -3, -6, 1, 2, 7,\n 0, -3, -4, -2, 3, -4, 3, -4, -2, -1, 2, 5, 6,\n -3, -1, -5, 2, -6, 1, -1, 3, -1, -3, 2, -1, -3,\n -3, -1, 3, 4, -6, -3, -2, 2, 6, -3, 3, -1, 3,\n -5, -1, 2, -1, 2, 2, 2, -4, -1, 1, -2, -3, 5,\n 1, -2, 5, 2, -2, 3, 0, 2, 4, -5, -2, -1, -3,\n 4, -3, -1, -2, -2, 2, -6, -3, 1, 3, 6, 2, -2,\n 2, 2, -3, 5, 3, 1, -7, -3, -2, 6, -1, -3, 6,\n -2, 3, 5, -3, 4, -3, 1, -4, -3, 2, 3, 5, 4,\n 5, 4, 4, -2, -1, 6, 3, -5, 3, 5, 1, -2, 2,\n 3, 1, -2, -4, 5, -3, -4, -2, -3, -3, -4, -5, -3,\n -2, 4, 5, 2, 2, -3, 4, 6, -4, 3, 0, 7, 3,\n 3, -2, -6, -5, -1, 1, 2, 2, 1, -3, -2, -6, -3,\n 7, -1, -3, -1, 2, 5, -2, -7, 5, 3, 3, -1, 5,\n 2, 2, -2, 2, -4, 4, 4, 3, 2, -2, 7, 1, 2,\n -6, -7, 3, 6, 2, 3, -3, 3, 2, 3, 1, 5, -4,\n...\n -3, -2, -1, -3, 3, -3, 2, 6, 6, -4, 1, -3, -4,\n 5, -2, -2, 5, -7, -3, 2, 4, -2, 4, -2, 4, 6,\n 1, -7, -3, 4, -3, 1, 2, -2, -5, 2, 5, -5, 6,\n 5, 1, -5, -5, 1, 4, 2, 2, 4, 6, -1, -1, -3,\n 3, 4, 2, 3, 3, 3, 4, 2, 1, -1, 3, -6, 2,\n 2, -7, -5, -2, 4, 7, -3, -3, -2, 2, -1, 3, -1,\n -5, 2, -5, 5, 3, 2, 1, 4, 3, -2, -2, 3, 1,\n -6, -2, 3, -2, -2, 5, -3, 3, -2, -3, 6, -4, 4,\n 6, -3, -6, -2, 6, 1, 3, -3, -4, -5, -3, -2, -1,\n -1, -1, -4, -4, 1, -2, -1, 1, -3, -3, 2, 1, -2,\n -6, 6, 4, -4, -5, -1, 4, 6, 3, 1, 7, 5, -3,\n 4, 7, 3, 3, 3, -3, 2, -3, 5, -3, -2, 3, -1,\n 6, 4, 2, 1, -3, 3, 1, -4, 4, -1, -7, -1, -2,\n -3, 0, 2, 0, 4, -2, -2, 5, 3, 5, 2, -4, -3,\n -2, 5, -4, -3, 2, 3, 3, -4, 4, 0, -3, -4, 2,\n 3, -2, -2, 1, -1, 6, -5, -3, 2, -1, -3, 1, -3,\n 2, 7, -3, -1, -2, -2, -2, 2, 3, -3, -2, 2, -2,\n -4, -5, -5, -4, 1, -2, -2, -3, 3, -2, -12, -4, -5,\n 4, -3, -4, -7, 3, 1, -5, 2, -3, 3, 2, -2, 4,\n 4, -2, -3, -2, -4, 4]]) reached_max_treedepth
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) step_size
(chain, draw)
float64
0.53 0.53 0.53 ... 0.4744 0.4744
array([[0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n 0.52997547, 0.52997547, 0.52997547, 0.52997547, 0.52997547,\n...\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 
0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816,\n 0.47436816, 0.47436816, 0.47436816, 0.47436816, 0.47436816]]) diverging
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) energy_error
(chain, draw)
float64
0.2536 0.2799 ... 0.09318 -0.03638
array([[ 2.53572884e-01, 2.79878072e-01, 5.42750770e-02,\n 3.74554019e-01, -4.94742630e-01, 7.81466398e-02,\n -8.25426137e-02, -1.09557549e-01, -1.86165734e-01,\n 7.10746180e-02, -1.73225437e-01, 1.02405849e-01,\n 4.67979338e-01, -2.66565936e-01, 1.15245309e-01,\n 4.25658237e-02, -2.30807561e-01, -3.02454872e-01,\n 4.73189088e-02, 7.77315442e-02, 1.89080623e-01,\n -3.91272144e-01, -3.24840224e-02, 1.64339126e-01,\n -9.72280913e-03, 2.98931684e-02, -6.80183111e-02,\n 6.38062721e-01, -7.11971248e-01, 2.07633550e-01,\n -1.37227612e-01, 3.62624135e-01, 3.18822142e-01,\n 5.26094541e-01, 1.60081809e-01, -2.05446556e-01,\n -5.59573809e-01, 5.62657866e-01, -4.33410386e-01,\n -1.21190893e-01, 1.44072296e-01, -5.19481966e-02,\n 8.84679611e-01, -4.11061195e-01, -1.35943588e-01,\n -4.29844887e-01, 1.75882264e-03, -8.51694587e-02,\n 2.67008385e-01, -4.61675221e-01, 7.35312357e-02,\n 4.54258408e-01, -3.12267160e-01, 5.06058833e-01,\n -9.95097430e-02, -4.17124578e-01, -5.23523484e-01,\n 8.63159837e-01, -2.59606682e-01, -6.37244545e-01,\n...\n -7.28945777e-01, -1.39666301e-01, 7.14679073e-01,\n 2.75776708e-01, -9.43831437e-01, 1.92335311e-01,\n 8.45307367e-01, -5.84551014e-01, -1.37004416e-02,\n -1.63112513e-01, -6.24043605e-02, -3.11533343e-01,\n 4.13459431e-01, -1.90738873e-01, 2.66916059e-01,\n 4.78062694e-01, 9.22135365e-01, -3.89369386e-01,\n -2.19431661e-01, -2.46677557e-01, 4.37770299e-02,\n -2.81544324e-01, 6.38754113e-03, 5.26360197e-02,\n 3.35994130e-01, -2.03820444e-01, 5.18397101e-01,\n -2.29346875e-01, 2.61206759e-02, -5.70660190e-02,\n -4.14165113e-01, 1.20221509e-02, -9.55396916e-02,\n -4.50289422e-04, 2.55986706e-01, -2.96344666e-01,\n 3.69927908e-02, 1.51239414e-01, 1.99728145e-01,\n -1.22670731e-01, -5.40393545e-01, -3.06857078e-01,\n -7.39752878e-02, 1.54589319e-01, 6.28044281e-01,\n -1.65828433e-01, -4.38850708e-01, 1.88072667e-01,\n 2.90436350e-01, -2.51038878e-01, 7.33216438e-01,\n -2.09263138e-01, -1.84678219e-01, 1.62600425e-01,\n -1.59711103e-01, 
-7.70659562e-02, -2.40212298e-02,\n 9.31844101e-02, -3.63790858e-02]]) largest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) tree_depth
(chain, draw)
int64
3 3 3 3 3 3 3 3 ... 3 3 3 3 3 3 3 3
array([[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 2, 3, 2, 3, 2, 3, 3, 3,\n 3, 3, 2, 2, 3, 2, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 2,\n 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 2, 2, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3,\n 3, 3, 2, 3, 3, 2, 3, 4, 3, 3, 3, 3, 3, 2, 3, 3, 3, 2, 2, 3, 3, 3,\n 3, 2, 3, 3, 2, 3, 3, 3, 2, 3, 3, 3, 2, 3, 3, 3, 3, 2, 2, 3, 2, 2,\n 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 2, 3, 3, 3, 3,\n 3, 3, 2, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 2,\n 2, 3, 3, 2, 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 2, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 3, 2, 3, 3, 2, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3, 2, 4, 2, 3,\n 2, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3,\n 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 2,\n 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, 4, 3, 3, 3, 2, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 2, 2, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 4, 3, 3, 2, 3, 2, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 2, 2,\n 3, 2, 3, 4, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 2, 3, 4, 2, 3, 3, 3, 3, 3, 3, 2, 3, 2, 3, 2,\n 3, 3, 3, 3, 2, 3, 3, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 2, 3, 3, 3, 4, 2, 3, 3,\n...\n 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 4, 3, 3, 3, 3, 2, 3, 4, 3, 4, 3, 3,\n 3, 3, 2, 3, 3, 2, 2, 3, 3, 3, 3, 2, 2, 3, 3, 1, 3, 3, 3, 3, 3, 3,\n 3, 3, 2, 2, 2, 3, 2, 2, 3, 3, 3, 3, 4, 3, 3, 4, 3, 3, 3, 3, 3, 3,\n 3, 3, 2, 3, 4, 3, 1, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 2, 3, 2, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3,\n 3, 3, 3, 3, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 1, 2, 2, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 2, 3,\n 3, 2, 3, 3, 3, 3, 3, 2, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3,\n 3, 3, 3, 3, 2, 
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 3, 2, 3, 2, 2, 3, 3, 3, 3, 2, 2, 3, 2, 3, 3, 2, 3, 3, 3,\n 3, 3, 3, 2, 2, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 2, 3, 2, 3, 2, 3, 2, 2, 3, 2, 3, 3, 2, 2, 2, 3, 3, 3, 3,\n 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3, 2, 2, 3, 3, 3, 3,\n 2, 3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2,\n 3, 2, 3, 2, 3, 3, 2, 3, 4, 3, 2, 3, 2, 3, 3, 3, 3, 3, 2, 3, 3, 3,\n 2, 3, 3, 3, 2, 3, 3, 3, 2, 3, 3, 3, 3, 3, 2, 4, 3, 3, 3, 3, 3, 3,\n 3, 3, 3, 2, 2, 3, 2, 3, 3, 3, 3, 2, 3, 3, 3, 2, 3, 4, 2, 3, 2, 2,\n 3, 3, 2, 2, 3, 2, 3, 3, 3, 3, 3, 3, 2, 3, 3, 2, 4, 3, 3, 3, 3, 4,\n 4, 2, 3, 3, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 3, 3]]) max_energy_error
(chain, draw)
float64
0.5126 0.8966 ... 0.4156 0.1649
array([[ 0.51264033, 0.89664452, -0.34829787, 0.37455402, -0.61226734,\n -0.44477441, -0.58089633, 0.77762143, 0.30619347, 0.19962347,\n 0.34288771, 0.30377208, 0.46797934, -0.50828229, -0.21271919,\n -0.25891322, -0.23080756, -0.30245487, 0.73859706, 0.2122133 ,\n 1.25524366, -0.56179043, -0.35560877, 0.32730283, 0.18491935,\n 1.12487658, 0.61728094, 0.63806272, -0.71197125, 0.33585655,\n -0.21791457, 0.60859814, 0.45775993, 1.34123266, 1.07304827,\n -0.7776546 , -0.56551804, 0.74571825, -0.86773428, 0.48556311,\n 0.46560484, 0.32457203, 1.29354194, -0.46146367, -0.77274071,\n -0.71920732, 0.26717826, -0.2084788 , 0.27718681, -0.46167522,\n 0.98013372, 0.599298 , 0.69592637, 0.63708071, -0.52225633,\n 0.95038662, -0.52352348, 1.87066065, -0.35106404, -0.63724455,\n 0.51890181, 0.43379135, 0.12141178, 0.23220972, -0.40037877,\n 0.81258791, -0.17441236, 0.21912232, -0.1418094 , 0.40426358,\n -0.19266186, 0.29114668, -0.18350436, 0.44725201, 0.92318199,\n -1.31862723, 2.64609417, -0.8919105 , -0.65779802, 0.73964985,\n 0.7226031 , 0.54461231, -0.78504171, -0.32149914, -0.47340515,\n 0.38379499, 0.18903804, -0.34608194, -0.48119995, 0.15106933,\n -0.35183776, -0.09452987, 0.44043369, 0.58206949, -0.27956399,\n -0.11863408, 1.08795794, -0.21148766, 0.22471117, -0.40062218,\n...\n 0.13652831, -0.16631781, 1.43639012, -0.7529431 , 0.99855484,\n 0.79695099, -0.42935665, 0.33985748, 0.66511394, 0.55093328,\n 0.46249722, -0.38821353, -0.32899835, -0.358249 , -0.35283951,\n 0.25114435, -0.21194625, 1.18548156, 0.76600908, 9.78390233,\n -0.28853895, 0.42415624, -0.2845146 , 0.62334759, 0.64652938,\n 0.94232967, -0.27900537, 2.40256952, 0.52940022, 0.44978356,\n 0.84104045, -0.44063864, 0.42963323, -0.37123496, 0.20533484,\n 0.69797673, 0.38793992, 0.1598614 , 0.99208634, 1.89498599,\n 1.01212453, -0.72894578, 1.36129451, 0.71467907, 0.66227159,\n -0.94383144, 0.55276103, 0.84530737, -0.85844449, -0.31814179,\n -0.44212536, 0.4456122 , -0.31153334, 0.60800955, 0.34431337,\n 
0.4898156 , 0.47806269, 0.92213536, 0.78225423, -0.35147713,\n -0.32933553, 0.29732841, 2.31638546, 0.35980958, 0.42563364,\n 0.88543962, 0.73240068, 0.5183971 , -0.91681522, -0.6458505 ,\n -0.74569464, -0.41416511, 0.27762096, -0.28100951, 0.67269453,\n 0.51180786, -0.29634467, -0.08563039, 1.03099952, 0.4408778 ,\n -2.01691715, -1.55719318, -0.41080294, -0.65849958, 2.24974087,\n 1.23091827, -0.58139985, -0.43885071, 0.2195215 , 0.29043635,\n -0.39782467, 0.848183 , -0.23471125, -0.20502772, 0.23253864,\n 0.514116 , -0.14199875, 0.57717726, 0.41560939, 0.16490162]]) energy
(chain, draw)
float64
1.998e+03 2.002e+03 ... 2e+03
array([[1997.71718369, 2001.63696946, 2000.48067929, 2004.44644121,\n 2002.26626943, 1999.89557203, 1998.95049068, 2001.16275204,\n 1997.87353048, 1997.66474669, 1998.5036919 , 1997.30261185,\n 2003.37923965, 2004.78416862, 2002.52448142, 2004.39597654,\n 2001.25376547, 1997.58162495, 1997.42972698, 1997.50271439,\n 2000.62094748, 1998.5087813 , 1999.55095316, 2000.91352987,\n 1999.19112902, 2001.75944955, 2000.18827453, 2001.50571511,\n 2000.87338482, 1998.87554432, 1998.51351431, 2001.74894268,\n 2005.26063892, 2006.16518335, 2005.16964912, 2003.48493956,\n 2003.15639994, 2005.59857463, 2000.16345362, 2000.08142495,\n 2001.06349365, 2001.68191017, 2002.77588846, 2001.40324287,\n 2003.93748685, 2003.74368356, 1997.85685382, 1996.63626049,\n 1998.66734025, 1999.42939377, 2000.77563951, 1998.98174117,\n 2000.95328764, 2003.28405131, 2001.67843836, 2004.6703001 ,\n 2003.86984254, 2007.49763191, 2003.02977221, 2001.48615706,\n 1995.68626862, 1997.41914068, 1996.18446816, 1997.17045151,\n 1998.24747347, 2000.7786114 , 1996.41526118, 1999.62521487,\n 1998.09203358, 2000.11090465, 1998.32396563, 1996.00433112,\n 1996.73886069, 2000.07905345, 2002.88782218, 2004.99781026,\n 2007.69730796, 2008.53452524, 2001.05321988, 2002.02952111,\n...\n 1998.27945008, 1998.68605831, 1995.66858171, 1998.18266488,\n 1999.74582354, 2003.22096629, 2000.41903339, 2004.23567179,\n 1999.35673018, 2000.45948146, 2001.24516306, 1998.89185964,\n 1999.58402567, 1997.06842493, 1996.95115187, 2001.37047321,\n 2001.65167103, 1997.38294241, 2000.43973406, 2001.92666044,\n 2002.91733937, 2000.3234572 , 1998.56749446, 1998.81780045,\n 2000.99198666, 2000.95148069, 1997.65759042, 2001.6834058 ,\n 2000.73469387, 1999.23901908, 2000.73093378, 1999.40693989,\n 1997.12642692, 2000.32601273, 1998.53126532, 1998.12891517,\n 2000.42454591, 2004.72437571, 2003.19959848, 2000.4927576 ,\n 1996.39172526, 1996.92762798, 2001.46138683, 1998.05709138,\n 1996.62947297, 2000.50388952, 2001.58058836, 1999.38337071,\n 
1998.96373172, 1998.66487688, 2003.31451519, 2001.4254794 ,\n 1999.14339044, 1997.64870183, 1996.86845591, 1999.70095363,\n 1998.81689425, 1995.34637478, 2000.15604519, 2005.60026942,\n 2008.17045932, 2006.66674746, 2005.4178891 , 2003.62984637,\n 2005.574225 , 2002.23237362, 2000.5571393 , 1999.4320589 ,\n 1996.70793909, 2004.26007049, 2004.86461975, 2003.94778543,\n 2000.19957867, 1997.83397898, 1997.25923099, 1999.24963068,\n 1996.46779129, 1997.24515173, 2001.08876798, 1999.53192204]]) Attributes: (8)
created_at : 2026-01-08T04:58:29.498781+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 49.984930753707886 tuning_steps : 500 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n observed_data \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 24kB\nDimensions: (__obs__: 1000, rt,response_extra_dim_0: 2)\nCoordinates:\n * __obs__ (__obs__) int64 8kB 0 1 2 3 4 ... 996 997 998 999\n * rt,response_extra_dim_0 (rt,response_extra_dim_0) int64 16B 0 1\nData variables:\n rt,response (__obs__, rt,response_extra_dim_0) float64 16kB ...\nAttributes:\n created_at: 2026-01-08T04:58:29.507653+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions: __obs__ : 1000rt,response_extra_dim_0 : 2
Coordinates: (2)
Data variables: (1)
rt,response
(__obs__, rt,response_extra_dim_0)
float64
3.048 1.0 2.922 ... 1.0 2.142 1.0
array([[ 3.04842567, 1. ],\n [ 2.92167377, 1. ],\n [ 0.93402535, -1. ],\n ...,\n [ 0.5660336 , 1. ],\n [ 2.77922082, 1. ],\n [ 2.1417582 , 1. ]], shape=(1000, 2)) Attributes: (6)
created_at : 2026-01-08T04:58:29.507653+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n
\n "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZsHO",
+ "code_hash": "70ebb3d3d7aa6a00da6bc9bf5e77683c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "HNXk",
+ "code_hash": "ae51fbf7ab64e830ec4568ab822e3d44",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "bhgI",
+ "code_hash": "44f947eb61bdc12553048113bd4384ef",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "DlKN",
+ "code_hash": "b0db33c2eaa15cf2009a0c7603a93680",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "QYfb",
+ "code_hash": "44021c99c9c8dab765e6585956b86186",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "qJXu",
+ "code_hash": "95170aee36a03a4e7f90b1be271bbf52",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "LdJG",
+ "code_hash": "9f8147656aca8428164678c11972c9e3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (1 chains in 1 job)\nNUTS: [z, theta, a, t, v_Intercept, v_x, v_y]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1500 0 0.264 15 18.20 draws/s 0:01:22 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 1 chain for 500 tune and 1_000 draw iterations (500 + 1_000 draws total) took 82 seconds.\nOnly one chain was sampled, this makes it impossible to run some convergence checks\n\r 0%| | 0/1000 [00:00, ?it/s]\r 8%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 76/1000 [00:00<00:01, 757.53it/s]\r 16%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 160/1000 [00:00<00:01, 804.35it/s]\r 24%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 243/1000 [00:00<00:00, 812.84it/s]\r 33%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 328/1000 [00:00<00:00, 825.28it/s]\r 41%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 411/1000 [00:00<00:00, 815.38it/s]\r 49%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 493/1000 [00:00<00:00, 768.91it/s]\r 
57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 571/1000 [00:00<00:00, 721.54it/s]\r 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 644/1000 [00:00<00:00, 714.68it/s]\r 72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 719/1000 [00:00<00:00, 724.10it/s]\r 
79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 792/1000 [00:01<00:00, 725.23it/s]\r 88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 878/1000 [00:01<00:00, 763.71it/s]\r 
97%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 969/1000 [00:01<00:00, 806.07it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1000/1000 [00:01<00:00, 774.79it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "fOSF",
+ "code_hash": "c6ea4b9fcbc9ce743bfe17ec28c076c0",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "HRKw",
+ "code_hash": "7c9e03defee881af8aaf0f55a384ea93",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "MYoQ",
+ "code_hash": "81131f40505dd8143f8cdd1f3f592748",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "Hierarchical Sequential Sampling Model\nModel: angle\n\nResponse variable: rt,response\nLikelihood: approx_differentiable\nObservations: 1000\n\nParameters:\n\nv:\n Formula: v ~ 1 + x + y\n Priors:\n v_Intercept ~ Uniform(lower: -3.0, upper: 3.0)\n v_x ~ Uniform(lower: -1.0, upper: 1.0)\n v_y ~ Uniform(lower: -1.0, upper: 1.0)\n Link: identity\n Explicit bounds: (-3.0, 3.0)\n\na:\n Formula: a ~ 1 + x + y\n Priors:\n a_Intercept ~ Uniform(lower: 0.5, upper: 3.0)\n a_x ~ Uniform(lower: -1.0, upper: 1.0)\n a_y ~ Uniform(lower: -1.0, upper: 1.0)\n Link: identity\n Explicit bounds: (0.3, 3.0)\n\nz:\n Prior: Uniform(lower: 0.1, upper: 0.9)\n Explicit bounds: (0.1, 0.9)\n\nt:\n Prior: Uniform(lower: 0.001, upper: 2.0)\n Explicit bounds: (0.001, 2.0)\n\ntheta:\n Prior: Uniform(lower: -0.1, upper: 1.3)\n Explicit bounds: (-0.1, 1.3)\n\n\nLapse probability: 0.05\nLapse distribution: Uniform(lower: 0.0, upper: 20.0) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "BzwM",
+ "code_hash": "05b233760d741781b9ffd2a549f7e6c4",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "nSRe",
+ "code_hash": "d0533d558dd38a9400a769ddf852a4f2",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [z, theta, t, v_Intercept, v_x, v_y, a_Intercept, a_x, a_y]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.449 7 21.24 draws/s 0:01:34 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.277 15 10.14 draws/s 0:03:17 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 1_000 tune and 1_000 draw iterations (2_000 + 2_000 draws total) took 197 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/2000 [00:00, ?it/s]\r 5%|\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 95/2000 [00:00<00:02, 937.45it/s]\r 10%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 192/2000 [00:00<00:01, 952.42it/s]\r 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 288/2000 [00:00<00:01, 921.84it/s]\r 19%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 381/2000 [00:00<00:01, 922.50it/s]\r 24%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 474/2000 [00:00<00:01, 914.14it/s]\r 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 566/2000 [00:00<00:01, 882.52it/s]\r 33%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 655/2000 [00:00<00:01, 874.88it/s]\r 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 743/2000 [00:00<00:01, 856.10it/s]\r 
41%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 829/2000 [00:00<00:01, 837.73it/s]\r 46%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 913/2000 [00:01<00:01, 826.69it/s]\r 50%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 1000/2000 [00:01<00:01, 838.23it/s]\r 54%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1089/2000 [00:01<00:01, 853.01it/s]\r 
59%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1179/2000 [00:01<00:00, 863.67it/s]\r 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1266/2000 [00:01<00:01, 686.72it/s]\r 67%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1341/2000 [00:01<00:01, 537.05it/s]\r 
70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1404/2000 [00:01<00:01, 544.56it/s]\r 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 1465/2000 [00:01<00:00, 547.16it/s]\r 76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1525/2000 [00:02<00:00, 550.37it/s]\r 
79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 1584/2000 [00:02<00:00, 553.16it/s]\r 82%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1642/2000 [00:02<00:00, 546.83it/s]\r 85%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1699/2000 
[00:02<00:00, 542.15it/s]\r 88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1755/2000 [00:02<00:00, 541.85it/s]\r 91%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1811/2000 [00:02<00:00, 545.58it/s]\r 
93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 1867/2000 [00:02<00:00, 518.56it/s]\r 96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1920/2000 [00:02<00:00, 519.36it/s]\r 
99%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1976/2000 [00:02<00:00, 529.68it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2000/2000 [00:02<00:00, 671.09it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "TmyF",
+ "code_hash": "29accd6f1517fbcf2c853bc5a6d7c438",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "tbSj",
+ "code_hash": "ba9b62d415b8fb5b23b67d658f585145",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "eLzN",
+ "code_hash": "ce80c8b9ed14ed14c9015acfafaf7de7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "JHzP",
+ "code_hash": "167ead55e24ad2713ece00502d6e7b79",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "elJp",
+ "code_hash": "001120caffcd10214ed7bb5761a74991",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "EyBw",
+ "code_hash": "f7be7fdd87f2ea7287963dfa606b0b1a",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [z, theta, a, t, v_C(x), v_y]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1500 0 0.362 7 24.34 draws/s 0:01:01 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1500 0 0.343 7 11.57 draws/s 0:02:09 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 500 tune and 1_000 draw iterations (1_000 + 2_000 draws total) took 130 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/2000 [00:00, ?it/s]\r 4%|\u2588\u2588\u2588\u2588\u2588\u258f | 80/2000 [00:00<00:02, 795.96it/s]\r 9%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 173/2000 [00:00<00:02, 873.05it/s]\r 13%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 261/2000 [00:00<00:02, 819.39it/s]\r 17%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 344/2000 [00:00<00:02, 767.41it/s]\r 21%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 422/2000 [00:00<00:02, 725.52it/s]\r 25%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 496/2000 [00:00<00:02, 690.54it/s]\r 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 566/2000 [00:00<00:02, 665.95it/s]\r 32%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 633/2000 [00:00<00:02, 645.10it/s]\r 
35%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 698/2000 [00:01<00:02, 611.46it/s]\r 38%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 760/2000 [00:01<00:02, 578.56it/s]\r 41%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 821/2000 [00:01<00:02, 584.52it/s]\r 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 887/2000 [00:01<00:01, 603.39it/s]\r 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 958/2000 [00:01<00:01, 630.81it/s]\r 
52%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1030/2000 [00:01<00:01, 654.83it/s]\r 55%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1107/2000 [00:01<00:01, 686.43it/s]\r 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1191/2000 [00:01<00:01, 728.97it/s]\r 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1275/2000 [00:01<00:00, 760.20it/s]\r 
68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1365/2000 [00:01<00:00, 799.46it/s]\r 72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1446/2000 [00:02<00:00, 798.30it/s]\r 76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1526/2000 [00:02<00:00, 791.15it/s]\r 
81%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1611/2000 [00:02<00:00, 806.06it/s]\r 85%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1696/2000 [00:02<00:00, 818.36it/s]\r 
89%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1778/2000 [00:02<00:00, 813.75it/s]\r 93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1863/2000 [00:02<00:00, 822.82it/s]\r 
98%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1951/2000 [00:02<00:00, 839.23it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2000/2000 [00:02<00:00, 735.09it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "MrKo",
+ "code_hash": "263ac2f3ceed32c9cf4c903f7bf5b3f9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ofkl",
+ "code_hash": "59facf26867c6e91b470717661a4c070",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "TzTD",
+ "code_hash": "39f453e72dcc8b60538e5750c636184d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "ywjc",
+ "code_hash": "175b601e84163b0c002cc17c0ceec3f0",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "WyQu",
+ "code_hash": "96a6485ec3fe08d449aeeb1b2e29f903",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n \n \n
\n \n \n \n posterior \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 308kB\nDimensions: (chain: 2, draw: 500,\n v_1|participant_id__factor_dim: 15)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 ... 496 497 498 499\n * v_1|participant_id__factor_dim (v_1|participant_id__factor_dim) <U2 120B ...\nData variables:\n v_Intercept (chain, draw) float64 8kB 0.4264 ... 0.7638\n v_1|participant_id (chain, draw, v_1|participant_id__factor_dim) float64 120kB ...\n v_x (chain, draw) float64 8kB 0.8355 ... 0.7895\n z (chain, draw) float64 8kB 0.4888 ... 0.4776\n v_1|participant_id_offset (chain, draw, v_1|participant_id__factor_dim) float64 120kB ...\n theta (chain, draw) float64 8kB 0.04164 ... 0.0...\n v_y (chain, draw) float64 8kB 0.3191 ... 0.3441\n a (chain, draw) float64 8kB 1.524 ... 1.505\n t (chain, draw) float64 8kB 0.52 ... 0.5063\n v_1|participant_id_sigma (chain, draw) float64 8kB 0.6355 ... 0.376\nAttributes:\n created_at: 2026-01-08T05:23:59.285575+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 1005.5311143398285\n tuning_steps: 500\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions: chain : 2draw : 500v_1|participant_id__factor_dim : 15
Coordinates: (3)
chain
(chain)
int64
0 1
draw
(draw)
int64
0 1 2 3 4 5 ... 495 496 497 498 499
array([ 0, 1, 2, ..., 497, 498, 499], shape=(500,)) v_1|participant_id__factor_dim
(v_1|participant_id__factor_dim)
<U2
'0' '1' '2' '3' ... '12' '13' '14'
array(['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12',\n '13', '14'], dtype='<U2') Data variables: (10)
v_Intercept
(chain, draw)
float64
0.4264 0.6527 ... 0.7272 0.7638
array([[0.42640404, 0.65274261, 0.56023536, 0.60417001, 0.59482216,\n 0.51860914, 0.55656672, 0.57616957, 0.53656839, 0.55418519,\n 0.58562617, 0.52664931, 0.52040458, 0.46260286, 0.45894046,\n 0.4274176 , 0.45560905, 0.51159136, 0.49059808, 0.50052211,\n 0.35063551, 0.33328413, 0.45677858, 0.66445257, 0.58042548,\n 0.60549726, 0.60673843, 0.58521359, 0.66370156, 0.65261589,\n 0.61842521, 0.66651348, 0.65711652, 0.70263252, 0.65929892,\n 0.66652437, 0.62887965, 0.67624088, 0.76585766, 0.75197683,\n 0.68052799, 0.68656567, 0.67793322, 0.58694436, 0.63227268,\n 0.63638388, 0.63003468, 0.60565397, 0.60016461, 0.61493434,\n 0.54612468, 0.57204647, 0.35277427, 0.37229316, 0.42781226,\n 0.35580912, 0.30747953, 0.22850558, 0.37131044, 0.15345665,\n 0.50689641, 0.5196562 , 0.60192155, 0.73383517, 0.7782408 ,\n 0.76515894, 0.75700635, 0.66972908, 0.70167148, 0.70661322,\n 0.69545602, 0.65912507, 0.75544228, 0.8075718 , 0.6112275 ,\n 0.59868646, 0.51794842, 0.66626908, 0.56472935, 0.75522631,\n 0.71886488, 0.74358325, 0.69679274, 0.6615853 , 0.48539714,\n 0.57549196, 0.58034831, 0.58586631, 0.54779534, 0.56073301,\n 0.55013721, 0.53516606, 0.65462612, 0.63984593, 0.61204587,\n 0.65568896, 0.71900207, 0.65141011, 0.63283056, 0.63419567,\n...\n 0.59182891, 0.52654462, 0.45312635, 0.37827376, 0.65713991,\n 0.59277107, 0.25012072, 0.23218657, 0.48315799, 0.4661085 ,\n 0.51314588, 0.56476428, 0.53386868, 0.45281157, 0.34242277,\n 0.43120255, 0.39179279, 0.36188016, 0.35771363, 0.54243135,\n 0.55270463, 0.61816613, 0.56082824, 0.40805518, 0.51613766,\n 0.59574253, 0.46949776, 0.41892823, 0.36292976, 0.44247332,\n 0.52097405, 0.49617135, 0.46248951, 0.45695877, 0.72918539,\n 0.74540053, 0.45221671, 0.48151198, 0.47267074, 0.52184966,\n 0.5447969 , 0.57370051, 0.56917352, 0.55577414, 0.65502191,\n 0.57840432, 0.55397032, 0.55226677, 0.55119088, 0.57088483,\n 0.63965071, 0.59257655, 0.5700089 , 0.57320233, 0.57363756,\n 0.58450845, 0.69686541, 0.56899045, 0.69119928, 0.68653895,\n 
0.56384969, 0.5501474 , 0.51606028, 0.49039632, 0.47111854,\n 0.53903155, 0.57224983, 0.70106167, 0.7776272 , 0.76302444,\n 0.6547194 , 0.63349046, 0.70998376, 0.66103543, 0.7067445 ,\n 0.67102583, 0.75847379, 0.87838122, 0.51515353, 0.51982511,\n 0.47884046, 0.53783127, 0.53344066, 0.50568584, 0.61042705,\n 0.47712212, 0.4810495 , 0.406087 , 0.40141883, 0.40333215,\n 0.45003552, 0.41361863, 0.5054528 , 0.50687282, 0.51490015,\n 0.65664228, 0.65637855, 0.66631228, 0.72723679, 0.76376494]]) v_1|participant_id
(chain, draw, v_1|participant_id__factor_dim)
float64
0.4529 0.3071 ... -0.8213 0.7953
array([[[ 0.45287528, 0.30714809, -0.28577973, ..., -0.23544136,\n -0.39672421, 1.20833348],\n [ 0.04186083, -0.06785837, -0.54600005, ..., -0.44406369,\n -0.68969059, 0.79830555],\n [ 0.28614867, 0.08859129, -0.39296176, ..., -0.42291199,\n -0.58379543, 0.88484914],\n ...,\n [ 0.2047774 , 0.26112966, -0.45626174, ..., -0.44901697,\n -0.64719957, 0.90693676],\n [ 0.35596049, -0.00554826, -0.37478011, ..., -0.35948955,\n -0.49265857, 0.97914896],\n [ 0.43278892, 0.21245074, -0.29235637, ..., -0.28306664,\n -0.44184753, 1.13201112]],\n\n [[ 0.17217265, 0.082477 , -0.48359969, ..., -0.44164693,\n -0.59903633, 0.85835088],\n [ 0.2847594 , 0.04117175, -0.38422394, ..., -0.37541013,\n -0.5593601 , 0.748388 ],\n [ 0.20664175, 0.16028363, -0.43027708, ..., -0.36927916,\n -0.53287128, 0.884874 ],\n ...,\n [ 0.15489383, 0.07502011, -0.58823979, ..., -0.56620755,\n -0.7554724 , 0.83895508],\n [ 0.22665483, -0.10831313, -0.55863349, ..., -0.66419793,\n -0.65301957, 0.71196316],\n [ 0.07512384, 0.00688033, -0.63260338, ..., -0.49957389,\n -0.8213285 , 0.79534116]]], shape=(2, 500, 15)) v_x
(chain, draw)
float64
0.8355 0.7878 ... 0.842 0.7895
array([[0.83548879, 0.78775718, 0.8267601 , 0.84626806, 0.79980495,\n 0.83050093, 0.82562578, 0.79794752, 0.79323865, 0.79680647,\n 0.79367019, 0.82820125, 0.80605073, 0.80560566, 0.82692893,\n 0.74819787, 0.81124015, 0.79307083, 0.78265827, 0.78137861,\n 0.83499172, 0.76329241, 0.83098008, 0.78850568, 0.79105236,\n 0.8209256 , 0.78822329, 0.83517185, 0.82614021, 0.82873719,\n 0.79389402, 0.80448942, 0.7813741 , 0.81388661, 0.7902266 ,\n 0.83850114, 0.78879722, 0.78444872, 0.84565004, 0.84109562,\n 0.77961749, 0.75570146, 0.79889927, 0.76158503, 0.752657 ,\n 0.83293645, 0.83897052, 0.76272822, 0.75325548, 0.73969386,\n 0.86439847, 0.76605338, 0.85404946, 0.8283195 , 0.80811471,\n 0.80961596, 0.82231241, 0.8140471 , 0.79364852, 0.79389832,\n 0.83431315, 0.81817331, 0.82160212, 0.81896759, 0.75974165,\n 0.84647351, 0.79145411, 0.83450799, 0.81400083, 0.82468402,\n 0.80017813, 0.81839942, 0.81986862, 0.80981109, 0.74287498,\n 0.73265537, 0.73327476, 0.89552271, 0.80362817, 0.82495531,\n 0.84285463, 0.83512998, 0.85720055, 0.76015379, 0.86304052,\n 0.788043 , 0.78632426, 0.80890624, 0.80427364, 0.81140658,\n 0.79895372, 0.7724524 , 0.8160105 , 0.82831307, 0.85135739,\n 0.79900773, 0.82221128, 0.85236538, 0.82204269, 0.82277339,\n...\n 0.81898846, 0.79152571, 0.82616864, 0.77498174, 0.75870415,\n 0.84612846, 0.79920173, 0.76573814, 0.87759005, 0.84965264,\n 0.78015177, 0.86121438, 0.78969805, 0.80701117, 0.81272784,\n 0.84747953, 0.81283555, 0.82047231, 0.79858188, 0.79139118,\n 0.82367946, 0.81565396, 0.79826007, 0.82645963, 0.8260741 ,\n 0.82701391, 0.81072864, 0.79639703, 0.8231219 , 0.82597197,\n 0.79795528, 0.81760744, 0.82066401, 0.81179743, 0.82607438,\n 0.83311101, 0.77138188, 0.83935353, 0.80999804, 0.80391438,\n 0.77720585, 0.83016611, 0.83339754, 0.82412857, 0.7986786 ,\n 0.81103257, 0.82530267, 0.81835513, 0.8203286 , 0.74745896,\n 0.77187943, 0.79118842, 0.8062483 , 0.78901587, 0.7959551 ,\n 0.83814038, 0.78666277, 0.82949089, 0.77444389, 0.80127138,\n 
0.82512537, 0.82719616, 0.83902176, 0.8367364 , 0.85136562,\n 0.85918953, 0.80139213, 0.82411559, 0.87315328, 0.86589034,\n 0.85927643, 0.8621246 , 0.77905831, 0.78576037, 0.84083463,\n 0.83680352, 0.7852643 , 0.78375067, 0.80324991, 0.77824365,\n 0.78478037, 0.79826349, 0.79383984, 0.7998459 , 0.83382547,\n 0.82755799, 0.83431754, 0.80545534, 0.81362652, 0.81339601,\n 0.79243516, 0.83219835, 0.75167379, 0.82410449, 0.79844871,\n 0.81043459, 0.82760076, 0.84093142, 0.84203252, 0.78946657]]) z
(chain, draw)
float64
0.4888 0.4903 ... 0.4837 0.4776
array([[0.48879731, 0.49031549, 0.49804412, 0.49756876, 0.49920511,\n 0.48902832, 0.48448425, 0.49299704, 0.49515459, 0.49053748,\n 0.49206816, 0.49726932, 0.50215568, 0.50111223, 0.49938802,\n 0.49640953, 0.50334371, 0.50106115, 0.50297997, 0.49312116,\n 0.49694302, 0.49838099, 0.49819867, 0.51314846, 0.49566221,\n 0.48651023, 0.48108642, 0.50114314, 0.48598837, 0.48596008,\n 0.50065107, 0.49000629, 0.48185271, 0.47458372, 0.49500223,\n 0.49804652, 0.50686585, 0.50813148, 0.48536024, 0.50344033,\n 0.48779956, 0.49599867, 0.49739274, 0.5020206 , 0.50184445,\n 0.50262612, 0.50086519, 0.49526408, 0.49771497, 0.49974595,\n 0.49844376, 0.4887139 , 0.48864984, 0.4912214 , 0.49039773,\n 0.4948804 , 0.49142293, 0.49781418, 0.48574472, 0.49765887,\n 0.49026447, 0.49942065, 0.48376419, 0.48208812, 0.50946021,\n 0.49354448, 0.48940149, 0.49425368, 0.48991129, 0.48508853,\n 0.48574652, 0.48528806, 0.50611167, 0.4877322 , 0.49774583,\n 0.49985144, 0.50309806, 0.48141833, 0.4964371 , 0.48358724,\n 0.49741427, 0.49605409, 0.50036538, 0.49310773, 0.49334908,\n 0.50051207, 0.49597488, 0.49548137, 0.49618847, 0.495693 ,\n 0.49799893, 0.49540959, 0.47166968, 0.47289206, 0.48546876,\n 0.49681049, 0.49703503, 0.48609766, 0.48691059, 0.48535033,\n...\n 0.47519875, 0.50965557, 0.49028734, 0.49651439, 0.48635476,\n 0.49808077, 0.50709791, 0.50999195, 0.48014917, 0.49030241,\n 0.49270433, 0.48754536, 0.49389586, 0.47049107, 0.50795666,\n 0.49659018, 0.49580244, 0.49419524, 0.49020784, 0.48067692,\n 0.50636224, 0.4904979 , 0.48162528, 0.504318 , 0.48079244,\n 0.48981962, 0.49626 , 0.49660743, 0.49359698, 0.50172744,\n 0.4844043 , 0.49435931, 0.49709065, 0.49128952, 0.495763 ,\n 0.492814 , 0.49450575, 0.48335574, 0.48669748, 0.5061581 ,\n 0.50217917, 0.4781535 , 0.50907541, 0.51443812, 0.47650792,\n 0.49450977, 0.49903865, 0.47490495, 0.49076517, 0.49117852,\n 0.48601073, 0.48738439, 0.48864593, 0.4943657 , 0.4855899 ,\n 0.48611635, 0.49849438, 0.49260385, 0.50358412, 0.48358164,\n 
0.49946855, 0.49070826, 0.49579364, 0.49417824, 0.50448063,\n 0.48838733, 0.48637183, 0.50028081, 0.4890853 , 0.49658815,\n 0.49867623, 0.49683558, 0.49050431, 0.49764135, 0.48352776,\n 0.48556543, 0.49672459, 0.49532679, 0.49759404, 0.50022 ,\n 0.49328861, 0.49317466, 0.49386539, 0.48061742, 0.49376874,\n 0.4960595 , 0.50428939, 0.49212331, 0.49407016, 0.49428375,\n 0.49383814, 0.49305912, 0.50036023, 0.50905946, 0.50573419,\n 0.49139337, 0.48564965, 0.48993256, 0.48367629, 0.47758685]]) v_1|participant_id_offset
(chain, draw, v_1|participant_id__factor_dim)
float64
0.7126 0.4833 ... -2.184 2.115
array([[[ 0.712587 , 0.4832892 , -0.44966667, ..., -0.37046062,\n -0.62423481, 1.90128004],\n [ 0.07286224, -0.11811312, -0.95035835, ..., -0.77292967,\n -1.20046363, 1.38951698],\n [ 0.67585527, 0.20924399, -0.92813737, ..., -0.9988769 ,\n -1.3788679 , 2.08992743],\n ...,\n [ 0.50949947, 0.64970755, -1.13520882, ..., -1.11718335,\n -1.61027453, 2.25651751],\n [ 0.92185715, -0.01436875, -0.97059569, ..., -0.9309966 ,\n -1.27587424, 2.5357743 ],\n [ 0.88922875, 0.43651143, -0.60068934, ..., -0.58160222,\n -0.90784101, 2.32588404]],\n\n [[ 0.39984962, 0.19154261, -1.1231003 , ..., -1.02567022,\n -1.39118758, 1.99341345],\n [ 0.71414553, 0.10325427, -0.96359175, ..., -0.94148768,\n -1.40281414, 1.8768755 ],\n [ 0.56727945, 0.44001568, -1.1812102 , ..., -1.0137568 ,\n -1.46285504, 2.42918401],\n ...,\n [ 0.53263785, 0.2579738 , -2.02279696, ..., -1.94703404,\n -2.59786449, 2.88493877],\n [ 0.71556708, -0.34195305, -1.76364975, ..., -2.09692496,\n -2.061634 , 2.24772353],\n [ 0.19979925, 0.01829892, -1.68247096, ..., -1.32866594,\n -2.184404 , 2.1152881 ]]], shape=(2, 500, 15)) theta
(chain, draw)
float64
0.04164 0.007086 ... 0.04099
array([[ 0.0416375 , 0.00708617, 0.01488134, 0.01130536, 0.03207812,\n 0.00719689, 0.04807657, 0.03777332, 0.01210398, 0.01876168,\n 0.01868594, 0.00193287, 0.00653075, 0.00737391, 0.00988868,\n 0.00339079, 0.00584418, 0.00883105, 0.01421541, 0.04088143,\n 0.01197065, 0.02125228, 0.0239326 , 0.0381637 , 0.04985138,\n 0.02311471, 0.04263524, 0.03321626, 0.01847498, 0.02282678,\n 0.02323655, 0.02589956, 0.02609559, 0.03121161, 0.01615104,\n 0.04320515, 0.03305201, 0.03251563, 0.02203652, 0.01492491,\n 0.0354006 , -0.01624237, -0.00077787, 0.02376934, 0.02418681,\n 0.02995706, 0.01849691, 0.01273589, 0.00584213, 0.00305808,\n 0.03510834, 0.02115826, 0.02037207, 0.01018104, 0.00582978,\n 0.03590157, 0.03288227, 0.02125727, 0.02707268, 0.02671271,\n 0.0213814 , 0.03003109, 0.01816119, 0.03404118, 0.02069732,\n 0.02860948, 0.05071767, 0.01212113, 0.00365331, 0.02905507,\n 0.00022452, 0.00373229, 0.00678427, 0.0161299 , 0.03861261,\n 0.03832292, 0.04380864, 0.00494815, 0.0140211 , 0.03876458,\n 0.03326984, 0.02525611, 0.03043264, 0.01626147, 0.02225623,\n 0.03115471, 0.04036971, 0.03598573, 0.0481008 , 0.04647122,\n 0.04568948, 0.02432223, 0.02523883, 0.02797245, 0.01856296,\n 0.04520292, 0.02791444, 0.01258284, 0.00768886, 0.00682574,\n...\n 0.02679673, 0.01522209, 0.0274526 , 0.00962053, 0.03246581,\n 0.02401485, 0.03069385, 0.02874898, 0.01656145, -0.0031235 ,\n 0.01398489, 0.00327424, 0.01695551, -0.0016594 , 0.01909749,\n 0.01256276, 0.02098689, 0.02077396, 0.00845535, 0.04766208,\n 0.04464745, 0.02155203, 0.03231531, 0.0225231 , 0.02936022,\n 0.02384183, 0.0200505 , 0.02775818, 0.02064223, 0.03414142,\n 0.03041948, 0.02044608, 0.02206516, 0.00383221, 0.01358501,\n 0.0144265 , 0.02574929, 0.04613662, 0.04058156, 0.03789068,\n 0.04165765, 0.01726938, 0.01551387, 0.03050737, 0.00257047,\n 0.0338142 , 0.00881431, 0.0161157 , 0.01883054, 0.02426487,\n 0.02335379, 0.02626195, 0.03665696, 0.01953098, 0.02211465,\n 0.01294226, 0.01344848, 0.02029463, 0.06346698, 
0.02660627,\n 0.01117603, 0.04814441, 0.03082654, 0.03684704, 0.02401215,\n 0.00969135, 0.00284847, 0.04267009, 0.04234231, 0.01353048,\n 0.04311061, 0.03843648, 0.05235287, 0.02441532, 0.03632427,\n 0.03784573, 0.03941931, 0.00661728, 0.03934009, 0.03892688,\n 0.03608111, 0.04125206, 0.03102403, 0.02161943, 0.05038117,\n -0.01015529, 0.0342641 , 0.00989894, 0.02961388, 0.02694193,\n 0.01300397, 0.02829481, 0.05676759, 0.0238263 , 0.05137682,\n 0.00218322, 0.03355814, 0.03870206, 0.02892689, 0.04099293]]) v_y
(chain, draw)
float64
0.3191 0.34 ... 0.2894 0.3441
array([[0.3190938 , 0.34004645, 0.28389392, 0.26739709, 0.36760311,\n 0.31107053, 0.34187959, 0.333828 , 0.30550866, 0.30564344,\n 0.30577715, 0.31349921, 0.30192963, 0.26776148, 0.33157395,\n 0.33313118, 0.33568076, 0.32693825, 0.33395927, 0.31463966,\n 0.35719223, 0.32547524, 0.34998376, 0.34704496, 0.28657088,\n 0.33461981, 0.33388235, 0.31182823, 0.32124334, 0.34547113,\n 0.31217963, 0.3417354 , 0.35860226, 0.33779685, 0.36335628,\n 0.29881967, 0.31349029, 0.30355268, 0.3462199 , 0.31293221,\n 0.29481759, 0.31474419, 0.3314231 , 0.33705481, 0.3305092 ,\n 0.33242251, 0.34479935, 0.29012555, 0.3154707 , 0.31929515,\n 0.33544242, 0.30854847, 0.33747915, 0.35581523, 0.3829133 ,\n 0.2752542 , 0.33494233, 0.30221563, 0.30451823, 0.36628234,\n 0.3333594 , 0.31936106, 0.32244062, 0.33568418, 0.32587537,\n 0.29974364, 0.2787727 , 0.36049261, 0.33496159, 0.32110111,\n 0.28846938, 0.29015326, 0.33830728, 0.31411324, 0.23952636,\n 0.26156714, 0.28924233, 0.35953452, 0.32938295, 0.31866936,\n 0.34426912, 0.35405863, 0.35195247, 0.33934645, 0.30484446,\n 0.35603317, 0.32408652, 0.30215682, 0.32165844, 0.33434773,\n 0.32977656, 0.35615525, 0.32508365, 0.31559091, 0.33517276,\n 0.27568834, 0.37045128, 0.33020805, 0.32906022, 0.32655142,\n...\n 0.29695955, 0.33478622, 0.30510773, 0.34406312, 0.30151616,\n 0.33137152, 0.31868252, 0.30200989, 0.32754468, 0.33508587,\n 0.32151332, 0.31460226, 0.32698955, 0.33054058, 0.31681446,\n 0.35706224, 0.33823369, 0.32741708, 0.30619877, 0.31444766,\n 0.3281975 , 0.33520851, 0.30339506, 0.32485577, 0.30705238,\n 0.34120855, 0.31708637, 0.3211497 , 0.33548332, 0.30422543,\n 0.34138628, 0.30963343, 0.34236191, 0.28896591, 0.3060744 ,\n 0.30953646, 0.35402949, 0.27510096, 0.27519918, 0.34407102,\n 0.32294371, 0.32640507, 0.33060998, 0.34241879, 0.31684581,\n 0.32499977, 0.31793012, 0.32892719, 0.30500989, 0.32900004,\n 0.33310927, 0.3108703 , 0.30550922, 0.34802737, 0.3598789 ,\n 0.33044302, 0.32561536, 0.3280688 , 0.27688422, 0.32574053,\n 
0.34126684, 0.32026685, 0.3612283 , 0.33815149, 0.35135595,\n 0.31339586, 0.2685907 , 0.38227416, 0.32831095, 0.30637638,\n 0.30316028, 0.33102876, 0.30830675, 0.34378341, 0.35524187,\n 0.34979306, 0.29723679, 0.31172538, 0.30991546, 0.31962821,\n 0.29505249, 0.29483873, 0.31988645, 0.31278275, 0.3598557 ,\n 0.35006166, 0.30024443, 0.31997352, 0.33128739, 0.31501099,\n 0.32501157, 0.26393484, 0.37217065, 0.33379301, 0.32298852,\n 0.3632949 , 0.31793521, 0.34308139, 0.28944641, 0.34409082]]) a
(chain, draw)
float64
1.524 1.478 1.472 ... 1.498 1.505
array([[1.52404648, 1.47752257, 1.4721998 , 1.48089953, 1.46198926,\n 1.46958481, 1.49523611, 1.510066 , 1.43589 , 1.4409425 ,\n 1.43823065, 1.43046948, 1.42296436, 1.42569271, 1.44665738,\n 1.42020325, 1.43583149, 1.45289053, 1.46443623, 1.48505073,\n 1.44861014, 1.47231548, 1.46834995, 1.48731428, 1.52802616,\n 1.47480619, 1.49247897, 1.50288216, 1.47782982, 1.4785965 ,\n 1.45780625, 1.47784455, 1.47843611, 1.49300427, 1.47666235,\n 1.50355164, 1.50804012, 1.5148544 , 1.46324106, 1.45330552,\n 1.46488191, 1.38953998, 1.40472299, 1.46966382, 1.47572364,\n 1.48743012, 1.46898869, 1.42798092, 1.41852696, 1.42681325,\n 1.52543447, 1.43774658, 1.45952408, 1.46949292, 1.41630533,\n 1.49395141, 1.49957934, 1.46555994, 1.47478144, 1.48050798,\n 1.49082732, 1.47229953, 1.4599569 , 1.50471754, 1.45938736,\n 1.50946718, 1.51645263, 1.4594022 , 1.4537883 , 1.44751599,\n 1.48227602, 1.47715771, 1.46723385, 1.45348503, 1.49961526,\n 1.50163506, 1.50744691, 1.45649937, 1.43301581, 1.50728951,\n 1.48990379, 1.49478438, 1.47347812, 1.4711035 , 1.45260974,\n 1.50540962, 1.51140044, 1.50287492, 1.49032786, 1.501878 ,\n 1.49757541, 1.49656983, 1.46899731, 1.45463172, 1.48825859,\n 1.5154426 , 1.47776377, 1.44729082, 1.44359356, 1.44707867,\n...\n 1.4708492 , 1.44375619, 1.46250216, 1.44225678, 1.48329098,\n 1.48857633, 1.48944277, 1.48740162, 1.4596699 , 1.45139455,\n 1.45997542, 1.45201328, 1.45990399, 1.46950574, 1.46439126,\n 1.45690341, 1.45508613, 1.4480673 , 1.45800409, 1.50080038,\n 1.50449242, 1.47001773, 1.47577718, 1.46672821, 1.47814467,\n 1.49257454, 1.45185575, 1.46009842, 1.46372563, 1.49306585,\n 1.45408791, 1.47610011, 1.44970896, 1.44902254, 1.47278133,\n 1.47528219, 1.48080348, 1.4870536 , 1.49847559, 1.53102066,\n 1.50783965, 1.47283927, 1.47869135, 1.46820036, 1.44058426,\n 1.46969439, 1.4454862 , 1.44695969, 1.48293411, 1.45750964,\n 1.47925308, 1.47140024, 1.50170834, 1.45170734, 1.44573136,\n 1.49164674, 1.46333783, 1.47959515, 1.51398989, 1.49670073,\n 
1.44650479, 1.51241137, 1.50938976, 1.51388485, 1.47128273,\n 1.45242378, 1.43207444, 1.51968662, 1.53425264, 1.45449217,\n 1.52570222, 1.54378387, 1.51725649, 1.48601369, 1.50245823,\n 1.48525451, 1.47350519, 1.44737952, 1.48182425, 1.49177928,\n 1.50033896, 1.49435895, 1.46919875, 1.49010261, 1.52174287,\n 1.42068495, 1.45130441, 1.45235299, 1.51627283, 1.43859112,\n 1.45490598, 1.48932967, 1.52331185, 1.50319527, 1.51662502,\n 1.43178552, 1.48543435, 1.48504987, 1.49834864, 1.50546398]]) t
(chain, draw)
float64
0.52 0.5154 ... 0.4975 0.5063
array([[0.52004269, 0.51538718, 0.51316046, 0.51503642, 0.53743687,\n 0.51438363, 0.51132682, 0.51821635, 0.52907661, 0.53177276,\n 0.52772702, 0.54821698, 0.54472381, 0.53475534, 0.53759017,\n 0.53879086, 0.54421668, 0.52556158, 0.52387966, 0.52024857,\n 0.52269534, 0.53551286, 0.52018528, 0.52637764, 0.49745498,\n 0.5178827 , 0.51079103, 0.52545896, 0.51644281, 0.51195473,\n 0.53253199, 0.51494629, 0.50938031, 0.51122845, 0.5099089 ,\n 0.52889688, 0.50319357, 0.50578445, 0.53071723, 0.52676109,\n 0.51686757, 0.55213128, 0.55822849, 0.51342684, 0.51537907,\n 0.5238431 , 0.52596508, 0.53667214, 0.54981757, 0.55533856,\n 0.51961075, 0.5189647 , 0.51327013, 0.53445604, 0.52590895,\n 0.51301137, 0.51215836, 0.52964686, 0.50987378, 0.52888818,\n 0.51628208, 0.5186243 , 0.52256003, 0.50002023, 0.54355972,\n 0.51017467, 0.50180522, 0.52784698, 0.52866694, 0.5344108 ,\n 0.49818462, 0.49972973, 0.52113336, 0.52836664, 0.50826781,\n 0.51498755, 0.51333724, 0.52113741, 0.54269354, 0.51701036,\n 0.50828661, 0.51641601, 0.53045716, 0.52030115, 0.53787977,\n 0.50136142, 0.50819816, 0.51304478, 0.5202136 , 0.5201168 ,\n 0.51547842, 0.50282916, 0.52316143, 0.53058404, 0.51017821,\n 0.51942031, 0.51803405, 0.52991605, 0.52413342, 0.52744094,\n...\n 0.52088939, 0.5376197 , 0.52194357, 0.53813961, 0.50046928,\n 0.52342184, 0.52652306, 0.5221357 , 0.5272804 , 0.52383175,\n 0.51263425, 0.53453993, 0.52936587, 0.52095662, 0.5474571 ,\n 0.52739557, 0.5343509 , 0.52475998, 0.52354406, 0.50252296,\n 0.53401152, 0.49770465, 0.51561477, 0.52645412, 0.50609649,\n 0.52003458, 0.53584322, 0.52951669, 0.53478155, 0.51185743,\n 0.52899152, 0.51654028, 0.54410986, 0.51824206, 0.52030186,\n 0.51562149, 0.50879875, 0.52283317, 0.51619112, 0.49568184,\n 0.51538921, 0.52488106, 0.53051236, 0.53709185, 0.52300723,\n 0.52983619, 0.52476546, 0.51921238, 0.5233469 , 0.52214536,\n 0.51157629, 0.51926769, 0.50464458, 0.52748766, 0.54354188,\n 0.5024137 , 0.51851758, 0.51089044, 0.51514813, 0.49566903,\n 
0.53028437, 0.50643215, 0.51322359, 0.50322748, 0.53620858,\n 0.52528576, 0.53402367, 0.512197 , 0.50232899, 0.53159883,\n 0.48634089, 0.48603192, 0.51423471, 0.50405998, 0.52349343,\n 0.5244982 , 0.52092011, 0.54064745, 0.50934 , 0.51438502,\n 0.50904863, 0.51526788, 0.52926207, 0.50287432, 0.51605625,\n 0.53841302, 0.53903527, 0.51791899, 0.49857545, 0.54100202,\n 0.53447631, 0.52062898, 0.50198228, 0.51593239, 0.50979921,\n 0.52880215, 0.51523066, 0.5204303 , 0.49749151, 0.5063395 ]]) v_1|participant_id_sigma
(chain, draw)
float64
0.6355 0.5745 ... 0.3167 0.376
array([[0.63553682, 0.57452018, 0.4233875 , 0.46322222, 0.36460255,\n 0.39461434, 0.40984348, 0.38361176, 0.35453732, 0.3259223 ,\n 0.31424873, 0.32857984, 0.33597814, 0.37551215, 0.43315466,\n 0.34836199, 0.36324262, 0.3243143 , 0.31426926, 0.33818344,\n 0.48890323, 0.42795074, 0.39186939, 0.48406421, 0.35549485,\n 0.41411307, 0.4524688 , 0.43479585, 0.40719998, 0.4450937 ,\n 0.43567294, 0.39358609, 0.37832968, 0.38344829, 0.39262597,\n 0.4469984 , 0.39580713, 0.40386828, 0.33465972, 0.37808709,\n 0.41302173, 0.35446801, 0.42589136, 0.46834004, 0.44798922,\n 0.54049595, 0.47664917, 0.35584418, 0.37812215, 0.38749209,\n 0.50082995, 0.59957343, 0.45170506, 0.56353923, 0.4448373 ,\n 0.48896083, 0.54236287, 0.52422913, 0.49102051, 0.5467298 ,\n 0.41256291, 0.47188669, 0.36951325, 0.36331162, 0.39648196,\n 0.36772461, 0.4045846 , 0.43568975, 0.40161813, 0.41895213,\n 0.39822832, 0.43123796, 0.42695834, 0.46532618, 0.58978394,\n 0.50703217, 0.45866018, 0.32630943, 0.3805496 , 0.52690908,\n 0.54947788, 0.57221052, 0.46511427, 0.51236312, 0.43943473,\n 0.33631527, 0.32487395, 0.32028077, 0.32243155, 0.31343928,\n 0.33003572, 0.29311979, 0.29103385, 0.30192973, 0.32212276,\n 0.29295372, 0.29351732, 0.35763712, 0.30448973, 0.31311144,\n...\n 0.37668384, 0.37563508, 0.48047295, 0.49192018, 0.37784189,\n 0.33568027, 0.625703 , 0.56888169, 0.53155103, 0.44167541,\n 0.51407488, 0.53582249, 0.43641033, 0.55451149, 0.64770326,\n 0.51951786, 0.49296093, 0.48467235, 0.62848813, 0.31425561,\n 0.40467542, 0.42901348, 0.52597381, 0.52361571, 0.55457856,\n 0.67672558, 0.53361462, 0.6552775 , 0.75153224, 0.39595397,\n 0.35147004, 0.40456044, 0.3937646 , 0.44342947, 0.48096641,\n 0.46081975, 0.44936368, 0.36763398, 0.42210344, 0.4340471 ,\n 0.43997863, 0.35671832, 0.35878831, 0.3008445 , 0.31532414,\n 0.3245747 , 0.3120385 , 0.30885983, 0.30778813, 0.2692038 ,\n 0.3053557 , 0.3228318 , 0.39898254, 0.41675797, 0.44790313,\n 0.4598367 , 0.4243249 , 0.50441762, 0.44979712, 0.43412651,\n 
0.42999159, 0.374827 , 0.40812358, 0.42209259, 0.4186892 ,\n 0.42725893, 0.38752006, 0.61339704, 0.54171087, 0.46446663,\n 0.51392797, 0.49996905, 0.43339139, 0.38086446, 0.36898951,\n 0.33567912, 0.41415182, 0.50941457, 0.31032336, 0.28252796,\n 0.31986061, 0.31774015, 0.32528287, 0.4122418 , 0.3687313 ,\n 0.45235409, 0.36221697, 0.3892919 , 0.43615479, 0.4755106 ,\n 0.47441756, 0.45437346, 0.59216893, 0.50706543, 0.51259656,\n 0.28496338, 0.26251473, 0.29080516, 0.31674855, 0.37599661]]) Attributes: (8)
created_at : 2026-01-08T05:23:59.285575+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 1005.5311143398285 tuning_steps : 500 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n log_likelihood \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 24MB\nDimensions: (chain: 2, draw: 500, __obs__: 3000)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 6 ... 493 494 495 496 497 498 499\n * __obs__ (__obs__) int64 24kB 0 1 2 3 4 5 ... 2995 2996 2997 2998 2999\nData variables:\n rt,response (chain, draw, __obs__) float64 24MB -0.944 -1.644 ... -4.98\nAttributes:\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 \n \n
\n \n \n \n \n sample_stats \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 134kB\nDimensions: (chain: 2, draw: 500)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 4kB 0 1 2 3 4 5 ... 495 496 497 498 499\nData variables: (12/18)\n divergences (chain, draw) int64 8kB 0 0 0 0 0 0 0 ... 0 0 0 0 0 0\n step_size_bar (chain, draw) float64 8kB 0.1618 0.1618 ... 0.1967\n smallest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n acceptance_rate (chain, draw) float64 8kB 0.9976 0.8164 ... 0.984\n perf_counter_diff (chain, draw) float64 8kB 1.972 1.946 ... 0.4791\n n_steps (chain, draw) float64 8kB 63.0 63.0 ... 31.0 15.0\n ... ...\n diverging (chain, draw) bool 1kB False False ... False False\n energy_error (chain, draw) float64 8kB -0.1432 0.234 ... -0.07324\n largest_eigval (chain, draw) float64 8kB nan nan nan ... nan nan nan\n tree_depth (chain, draw) int64 8kB 6 6 5 5 5 4 5 ... 4 5 4 4 5 4\n max_energy_error (chain, draw) float64 8kB -0.4221 0.4662 ... -0.2357\n energy (chain, draw) float64 8kB 5.208e+03 ... 5.219e+03\nAttributes:\n created_at: 2026-01-08T05:23:59.298882+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 1005.5311143398285\n tuning_steps: 500\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (18)
divergences
(chain, draw)
int64
0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0
array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n...\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) step_size_bar
(chain, draw)
float64
0.1618 0.1618 ... 0.1967 0.1967
array([[0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n 0.16175598, 0.16175598, 0.16175598, 0.16175598, 0.16175598,\n...\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 
0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ,\n 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 , 0.1967063 ]]) smallest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) acceptance_rate
(chain, draw)
float64
0.9976 0.8164 ... 0.9933 0.984
array([[0.99758581, 0.81635596, 0.8819794 , 0.84561243, 0.88817493,\n 0.94959069, 0.71482006, 0.92341446, 0.97848979, 0.77269743,\n 0.44694704, 0.91209741, 0.88376377, 0.8763816 , 0.96890217,\n 0.9729305 , 0.52749659, 1. , 0.80812117, 0.80445545,\n 0.53525132, 0.81031134, 0.72585537, 0.99724106, 0.71320133,\n 1. , 0.46783262, 0.99466117, 0.81536424, 0.81314544,\n 0.75042429, 0.60186296, 0.82514354, 0.87105883, 0.76920495,\n 0.72678538, 0.95618256, 0.81831237, 0.99525888, 0.41876649,\n 0.99927723, 0.58718857, 0.42913721, 0.92537861, 0.75503309,\n 0.85054594, 0.3914135 , 0.93607973, 0.77760587, 0.79088856,\n 0.69602095, 0.90222335, 0.88725163, 0.67255817, 0.91427711,\n 0.88427101, 0.94625645, 0.83123205, 0.23808846, 0.88620189,\n 0.57718202, 0.99244293, 0.58170408, 0.99264019, 0.99112457,\n 0.6422391 , 0.91504257, 0.87446689, 0.6622421 , 0.46902443,\n 0.96450412, 0.99193909, 0.90520371, 0.78765638, 0.3354559 ,\n 0.84652249, 0.92861784, 0.85462129, 0.38998098, 0.7923972 ,\n 0.99728688, 0.86177815, 0.52886564, 0.9530452 , 0.86634944,\n 0.76249625, 0.67011136, 0.76272388, 0.9195145 , 0.88988851,\n 0.84175278, 0.92755731, 0.96239858, 0.88191921, 0.88963405,\n 0.80421648, 0.5897518 , 0.94348246, 0.95685636, 0.93823459,\n...\n 0.99592457, 0.786117 , 0.97976454, 0.85926248, 0.84403089,\n 0.92522401, 0.95370192, 0.29322813, 0.79593526, 0.82976873,\n 0.97420158, 0.94009835, 0.88291102, 0.87068416, 0.93433384,\n 0.9989092 , 0.85055748, 0.91973962, 0.68738577, 0.36204668,\n 0.7034888 , 0.7247684 , 0.93206659, 0.81275402, 0.95443018,\n 0.98592423, 0.82422526, 0.84000484, 0.89083167, 0.43623017,\n 0.95114438, 0.96422226, 0.78307437, 0.91577397, 0.61180105,\n 0.84777803, 0.76052447, 0.6847553 , 0.86713171, 0.71265747,\n 0.85893138, 0.69794843, 0.62740829, 0.96445025, 0.90973233,\n 0.96165908, 0.94800599, 0.89583704, 0.56709911, 1. 
,\n 0.79280759, 0.85932074, 0.98898102, 0.93876875, 0.89060736,\n 0.97026656, 0.79928608, 0.92757424, 0.72916573, 0.65368683,\n 0.71201898, 0.99584531, 0.85204783, 0.96339451, 0.89487845,\n 0.71692021, 0.97269593, 0.98901271, 0.99051328, 0.73427122,\n 0.88118376, 0.82412721, 0.79045784, 0.99797798, 0.82055452,\n 0.710347 , 0.92433726, 0.95461906, 0.86863539, 0.91086332,\n 0.8879656 , 0.83183121, 0.99821342, 0.88642839, 0.78822319,\n 0.97361248, 0.91849006, 0.99567641, 0.70401569, 0.90471428,\n 0.98700968, 0.94093935, 0.86032952, 0.5538063 , 0.73053271,\n 0.58416636, 0.84254157, 0.96012033, 0.99334285, 0.98399017]]) perf_counter_diff
(chain, draw)
float64
1.972 1.946 1.048 ... 0.9771 0.4791
array([[1.97245191, 1.94563195, 1.04812147, 0.98744208, 0.99380561,\n 0.49020746, 0.999191 , 0.99334113, 0.99090447, 0.98678417,\n 0.50300359, 1.01254958, 0.48346748, 1.96046413, 0.95967062,\n 0.49627266, 0.47080349, 0.49333088, 0.47729407, 0.49360134,\n 0.98478196, 0.99191847, 0.98289815, 1.0096586 , 0.94856696,\n 0.50054715, 0.97940488, 0.97503921, 0.96623454, 0.74774866,\n 0.98339328, 0.7489809 , 0.51020976, 0.27794466, 1.01891883,\n 1.00312067, 1.1194389 , 0.48564743, 1.02511663, 0.50663801,\n 0.98509436, 0.98249072, 0.48783634, 1.9762095 , 1.98218256,\n 0.7464331 , 0.97004984, 0.73458388, 0.97378993, 0.49302153,\n 0.99257826, 0.97134887, 1.97556722, 1.0051461 , 0.98618614,\n 0.98991993, 0.99242171, 0.97532516, 1.95812044, 0.9859147 ,\n 1.97159603, 0.49780085, 0.47826947, 0.97337272, 1.01640199,\n 1.99567435, 0.49300269, 0.72564302, 0.97651231, 0.51164027,\n 0.48784926, 0.48449399, 0.97936339, 0.47586402, 0.97830223,\n 0.98731701, 1.49103962, 0.99260327, 0.49790654, 0.98173234,\n 0.48981127, 0.24880263, 0.97343997, 0.99460957, 1.45680868,\n 1.51803315, 0.49471646, 0.48937951, 0.51979798, 0.50492373,\n 0.50381294, 0.48440628, 0.48641204, 0.48539975, 0.49619017,\n 0.50227645, 0.4986405 , 1.46929727, 0.48492221, 0.49059632,\n...\n 1.00382533, 0.97139873, 1.96391639, 0.49525539, 1.9536252 ,\n 0.47045789, 2.01103022, 1.2376425 , 2.24863887, 1.13549931,\n 1.0654159 , 0.50615544, 0.54998775, 1.04154514, 0.7809652 ,\n 2.18331686, 0.50090446, 0.5008897 , 0.62264122, 1.001258 ,\n 0.53292246, 0.9934328 , 0.50078826, 1.49034337, 1.49053745,\n 0.73792974, 1.51248105, 0.53417042, 0.23905443, 1.97396911,\n 0.99116553, 0.50100049, 0.48512602, 0.47421695, 1.9708232 ,\n 0.49898698, 1.491578 , 0.47842922, 1.00787015, 0.99766489,\n 0.50232646, 0.99160105, 0.49040949, 0.48119557, 0.47453208,\n 0.50014823, 0.48874877, 0.48499825, 0.49581988, 0.49336202,\n 0.96293332, 0.53658807, 1.02160711, 0.4865251 , 0.5210289 ,\n 0.48967167, 1.49221501, 1.50777809, 1.94600006, 0.50176379,\n 
0.98795345, 0.48842025, 0.98016945, 0.50960964, 1.01951464,\n 0.98112446, 1.93715836, 2.00624895, 0.49406048, 0.97227126,\n 0.9872888 , 0.48799975, 0.50964859, 0.49444775, 0.48834658,\n 0.48675884, 1.96296737, 0.49727979, 1.74835333, 0.50122489,\n 0.48542562, 0.99073861, 0.99273751, 0.73526222, 2.07130324,\n 1.01696966, 0.98409495, 0.75123959, 0.5081273 , 1.00853939,\n 0.49366394, 0.74424574, 0.97714399, 0.99114294, 0.47799056,\n 1.02295742, 0.4969981 , 0.48311284, 0.97709292, 0.47913915]]) n_steps
(chain, draw)
float64
63.0 63.0 31.0 ... 15.0 31.0 15.0
array([[ 63., 63., 31., 31., 31., 15., 31., 31., 31., 31., 15.,\n 31., 15., 63., 31., 15., 15., 15., 15., 15., 31., 31.,\n 31., 31., 31., 15., 31., 31., 31., 23., 31., 23., 15.,\n 7., 31., 31., 31., 15., 31., 15., 31., 31., 15., 63.,\n 63., 23., 31., 23., 31., 15., 31., 31., 63., 31., 31.,\n 31., 31., 31., 63., 31., 63., 15., 15., 31., 31., 63.,\n 15., 23., 31., 15., 15., 15., 31., 15., 31., 31., 47.,\n 31., 15., 31., 15., 7., 31., 31., 47., 47., 15., 15.,\n 15., 15., 15., 15., 15., 15., 15., 15., 15., 47., 15.,\n 15., 15., 15., 15., 15., 15., 15., 63., 31., 31., 15.,\n 63., 15., 31., 47., 15., 47., 31., 63., 31., 31., 31.,\n 31., 31., 23., 31., 15., 31., 15., 31., 15., 31., 31.,\n 31., 7., 63., 7., 63., 15., 31., 31., 47., 31., 31.,\n 47., 31., 15., 31., 15., 23., 7., 31., 15., 31., 31.,\n 15., 31., 15., 31., 31., 31., 31., 31., 31., 31., 63.,\n 15., 63., 47., 15., 31., 15., 31., 31., 31., 31., 31.,\n 15., 15., 31., 47., 31., 23., 31., 31., 47., 31., 15.,\n 31., 31., 31., 31., 31., 31., 15., 7., 7., 23., 63.,\n 31., 47., 15., 15., 47., 15., 31., 47., 63., 31., 31.,\n 15., 15., 31., 15., 15., 15., 31., 63., 15., 15., 63.,\n...\n 31., 15., 7., 15., 15., 31., 15., 31., 15., 31., 63.,\n 31., 15., 39., 31., 15., 31., 63., 23., 15., 31., 31.,\n 15., 15., 31., 31., 31., 47., 15., 15., 31., 15., 31.,\n 15., 7., 7., 15., 47., 15., 15., 15., 15., 31., 15.,\n 15., 15., 31., 47., 63., 7., 15., 31., 31., 15., 15.,\n 31., 31., 31., 15., 31., 47., 23., 15., 31., 31., 31.,\n 15., 15., 15., 15., 7., 39., 47., 15., 31., 15., 15.,\n 15., 31., 15., 31., 31., 7., 7., 15., 15., 63., 31.,\n 63., 7., 15., 15., 31., 15., 31., 31., 63., 15., 15.,\n 15., 31., 15., 15., 31., 31., 31., 15., 15., 31., 15.,\n 63., 31., 63., 47., 31., 31., 63., 15., 63., 15., 63.,\n 31., 63., 31., 31., 15., 15., 31., 23., 63., 15., 15.,\n 15., 31., 15., 31., 15., 47., 47., 23., 47., 15., 7.,\n 63., 31., 15., 15., 15., 63., 15., 47., 15., 31., 31.,\n 15., 31., 15., 15., 15., 15., 15., 15., 15., 15., 31.,\n 15., 
31., 15., 15., 15., 47., 47., 63., 15., 31., 15.,\n 31., 15., 31., 31., 63., 63., 15., 31., 31., 15., 15.,\n 15., 15., 15., 63., 15., 55., 15., 15., 31., 31., 23.,\n 63., 31., 31., 23., 15., 31., 15., 23., 31., 31., 15.,\n 31., 15., 15., 31., 15.]]) process_time_diff
(chain, draw)
float64
6.056 6.07 3.134 ... 3.008 1.504
array([[ 6.05637367, 6.06996776, 3.13408418, 3.03031661, 3.02146452,\n 1.50948242, 3.04389251, 3.07353969, 3.02912774, 3.01697688,\n 1.52100565, 3.02922849, 1.50700649, 6.06775157, 3.02583166,\n 1.51839629, 1.54563234, 1.52301921, 1.51369554, 1.52556366,\n 3.03079036, 3.03358521, 3.03556081, 3.02299767, 3.02831248,\n 1.50919272, 3.08272734, 3.02408072, 3.02887227, 2.28114985,\n 3.07331881, 2.25039423, 1.50680806, 0.78964505, 3.01397765,\n 3.0230629 , 2.99418022, 1.50805864, 3.16980609, 1.51804788,\n 3.03960666, 3.02329692, 1.50895599, 6.07424252, 6.13941276,\n 2.26760386, 3.03058476, 2.27848102, 3.02671399, 1.51644744,\n 3.05691495, 3.02467149, 6.1095443 , 3.07330448, 3.01636025,\n 3.00478163, 3.01492332, 3.09034101, 6.06485508, 3.04890011,\n 6.08443305, 1.5031727 , 1.52038773, 3.0235466 , 3.10120667,\n 6.15213363, 1.51472235, 2.26312997, 3.05947477, 1.52286108,\n 1.52018589, 1.50266286, 3.02817738, 1.50645215, 3.08318475,\n 3.0521771 , 4.53279977, 3.06599213, 1.51011075, 3.03300916,\n 1.51024132, 0.75556448, 3.01417389, 3.0589184 , 4.52330697,\n 4.5849456 , 1.50815085, 1.53897847, 1.53011506, 1.5085183 ,\n 1.55650537, 1.52111429, 1.50304319, 1.50504282, 1.51918458,\n 1.51708003, 1.51639834, 4.58853881, 1.5707911 , 1.50949843,\n...\n 3.03946211, 3.02911788, 6.10277825, 1.53521771, 6.13291403,\n 1.51530102, 6.08595242, 3.17553221, 6.27512867, 3.24934157,\n 3.23637945, 1.52589236, 1.64582203, 3.16667833, 2.30039457,\n 6.29214051, 1.5186895 , 1.56293338, 1.49330054, 3.07627223,\n 1.6295445 , 3.04236461, 1.57969857, 4.56718122, 4.60564979,\n 2.27137263, 4.58000446, 1.51394159, 0.74153018, 6.07314807,\n 3.03006792, 1.52941649, 1.50809109, 1.50792625, 6.10824363,\n 1.51953611, 4.63483571, 1.51338054, 3.05084209, 3.03415827,\n 1.51332495, 3.08491458, 1.51893808, 1.52255643, 1.5313255 ,\n 1.51909378, 1.52235968, 1.57562953, 1.52345881, 1.51414384,\n 3.04119896, 1.6023279 , 3.09514447, 1.51130715, 1.53550453,\n 1.51684417, 4.56818292, 4.65973527, 6.07768024, 1.51161754,\n 
3.04864217, 1.51881003, 3.03528571, 1.5042764 , 3.03352164,\n 3.03597802, 6.08297226, 6.07028999, 1.52009686, 3.03198938,\n 3.04801854, 1.52220533, 1.51135717, 1.50753952, 1.52250538,\n 1.51629277, 6.0781398 , 1.52187254, 5.32270063, 1.54675202,\n 1.525826 , 3.05061747, 3.02359115, 2.28610076, 6.11567384,\n 3.04734916, 3.03271011, 2.36853646, 1.5098977 , 3.0364505 ,\n 1.50750926, 2.28280601, 3.0376898 , 3.02993451, 1.53049307,\n 3.07605586, 1.5198744 , 1.51091103, 3.0075352 , 1.50361436]]) lp
(chain, draw)
float64
-5.2e+03 -5.201e+03 ... -5.208e+03
array([[-5200.20891701, -5200.87540081, -5201.32775315, -5204.48401124,\n -5204.42751873, -5204.31368038, -5208.23743044, -5206.12559672,\n -5208.12669703, -5205.78809872, -5212.08805204, -5206.99083718,\n -5207.18498888, -5212.35723731, -5202.80633934, -5208.36675146,\n -5208.33496019, -5207.99909187, -5205.6838575 , -5204.71366644,\n -5203.67055528, -5203.20295934, -5202.97741968, -5205.21752932,\n -5206.46617181, -5198.85017753, -5204.59925384, -5201.95746297,\n -5205.61331919, -5203.0167636 , -5204.52066582, -5200.27476081,\n -5200.75855404, -5205.83646173, -5206.82907755, -5203.31531242,\n -5207.19221647, -5209.98172921, -5206.95360391, -5207.66432786,\n -5203.27242527, -5215.85915958, -5210.39810484, -5207.50422335,\n -5207.66741476, -5199.87357785, -5198.63742173, -5204.15700606,\n -5206.11470774, -5208.80308305, -5202.54248608, -5202.66042041,\n -5203.42610754, -5202.68420555, -5206.12381952, -5201.45527223,\n -5199.13366692, -5198.29571758, -5198.38783799, -5202.99131239,\n -5201.33498492, -5200.96728869, -5209.49059318, -5206.30776094,\n -5208.95485108, -5204.367673 , -5208.01692727, -5201.69731918,\n -5203.25890904, -5211.47864384, -5216.92941181, -5207.04699751,\n -5204.4990466 , -5199.16247143, -5205.6945493 , -5203.96785397,\n -5204.54524597, -5210.08954302, -5208.34683288, -5204.61446599,\n...\n -5205.32923505, -5205.65387247, -5204.19413191, -5199.72459165,\n -5198.43642824, -5194.72028658, -5194.83329748, -5195.57571263,\n -5196.67859547, -5202.08391082, -5204.75696213, -5204.56554271,\n -5206.3362594 , -5206.27735886, -5206.80076051, -5202.98365276,\n -5206.89236187, -5208.59263611, -5203.16822966, -5207.75675107,\n -5200.79303822, -5205.16382551, -5209.11977989, -5212.74333394,\n -5214.21838173, -5208.98416271, -5206.7529862 , -5208.47012695,\n -5213.59362125, -5211.23046205, -5212.50407647, -5204.57146157,\n -5201.35766389, -5200.47206607, -5202.01772262, -5201.85111531,\n -5207.18336605, -5205.42040966, -5208.48206985, -5203.6464641 ,\n 
-5204.6328827 , -5200.26022696, -5202.90084198, -5202.89444226,\n -5204.69300997, -5204.43601122, -5204.54900623, -5202.92973007,\n -5198.69690304, -5201.74280079, -5203.71728813, -5209.32688073,\n -5207.01210884, -5204.88267816, -5210.74163928, -5209.29837496,\n -5204.05345805, -5203.30013617, -5208.33502365, -5208.3650942 ,\n -5206.94118275, -5208.33053936, -5209.89752773, -5203.02120571,\n -5207.70411646, -5206.64428995, -5208.04803645, -5205.14996748,\n -5204.81072812, -5208.27081386, -5202.25765021, -5203.13009783,\n -5205.58489088, -5203.45884443, -5206.42488776, -5211.55279875,\n -5208.03404586, -5212.25804914, -5211.93177688, -5208.21450275]]) perf_counter_start
(chain, draw)
float64
3.732e+03 3.734e+03 ... 4.135e+03
array([[3732.02725514, 3734.00025427, 3735.94630999, 3736.99481752,\n 3737.98281097, 3738.97702281, 3739.46786097, 3740.46913894,\n 3741.46287934, 3742.45492161, 3743.44228463, 3743.94739801,\n 3744.96039448, 3745.44443346, 3747.40532919, 3748.36556622,\n 3748.86238274, 3749.33485451, 3749.82873681, 3750.30671981,\n 3750.80088664, 3751.78618582, 3752.77870373, 3753.76405463,\n 3754.77426494, 3755.72325254, 3756.225287 , 3757.20525529,\n 3758.18084894, 3759.14756981, 3759.89577781, 3760.87967985,\n 3761.62977146, 3762.14067399, 3762.41909002, 3763.43860347,\n 3764.44213717, 3765.56224643, 3766.04934369, 3767.07495589,\n 3767.58282473, 3768.56853468, 3769.55146754, 3770.03977036,\n 3772.01650849, 3773.99928979, 3774.74837989, 3775.71897598,\n 3776.45415954, 3777.4284855 , 3777.92206322, 3778.91511671,\n 3779.88703231, 3781.8645257 , 3782.8703908 , 3783.85715943,\n 3784.8476333 , 3785.84048152, 3786.81638237, 3788.77492302,\n 3789.76124723, 3791.7344523 , 3792.23265425, 3792.71178563,\n 3793.68573393, 3794.7027582 , 3796.69883889, 3797.19242001,\n 3797.91864643, 3798.89567533, 3799.40778787, 3799.89618627,\n 3800.38119481, 3801.36360923, 3801.84001783, 3802.82005544,\n 3803.80794996, 3805.2995679 , 3806.29278184, 3806.79122386,\n...\n 4066.28045939, 4066.81385677, 4067.80786484, 4068.30914993,\n 4069.79996494, 4071.29102878, 4072.02949586, 4073.54254619,\n 4074.07728472, 4074.31691487, 4076.29143123, 4077.28314059,\n 4077.78457395, 4078.27026934, 4078.74604819, 4080.71730387,\n 4081.21667841, 4082.70879109, 4083.18777049, 4084.19618427,\n 4085.19431163, 4085.69907056, 4086.69133288, 4087.18228428,\n 4087.6638546 , 4088.13880635, 4088.63949192, 4089.12880686,\n 4089.6144186 , 4090.11277281, 4090.60663052, 4091.57016618,\n 4092.10749703, 4093.12966217, 4093.61669868, 4094.13826515,\n 4094.62840703, 4096.12119083, 4097.62949232, 4099.57610314,\n 4100.07830993, 4101.06682538, 4101.55566167, 4102.53785103,\n 4103.04792576, 4104.06810762, 4105.050547 , 4106.98831821,\n 
4108.99497958, 4109.48954234, 4110.46245168, 4111.45114808,\n 4111.93955099, 4112.449643 , 4112.9453561 , 4113.43567686,\n 4113.92412568, 4115.88777229, 4116.38559795, 4118.13465988,\n 4118.63853554, 4119.12448979, 4120.1156484 , 4121.10892362,\n 4121.84472096, 4123.91666999, 4124.93420015, 4125.91887844,\n 4126.67064282, 4127.18005484, 4128.18907133, 4128.68342212,\n 4129.42936504, 4130.40698546, 4131.39870263, 4131.87726166,\n 4132.90071486, 4133.39827029, 4133.88179092, 4134.85948719]]) index_in_trajectory
(chain, draw)
int64
-8 -55 25 3 -12 ... 23 10 -3 6 -8
array([[ -8, -55, 25, 3, -12, -13, 11, 6, 11, 2, 1, 6, -3,\n 5, -6, 10, 12, -7, 3, -9, 13, -8, -11, -31, -11, -11,\n 6, 20, 10, 4, 11, -7, 10, -7, -8, 11, 10, 1, 17,\n 10, 14, 11, -6, -12, -2, -8, -4, -10, 3, 2, -15, 20,\n -15, -6, -4, 24, 12, -13, -26, 22, -27, 8, 11, 13, 11,\n 14, -11, -12, 6, 7, 8, -3, -22, -7, -13, -3, 5, -19,\n 10, -19, -8, -4, -9, -13, -19, -14, -6, 14, -5, -2, -2,\n -5, -11, 2, 8, 11, -8, -12, -4, 1, -4, 11, -10, 2,\n -6, -3, -12, 8, -5, -9, 27, -7, 10, 26, -14, 12, -3,\n 9, 10, -13, -9, 25, 6, 11, -4, 5, -13, -7, -10, -5,\n -18, 1, -13, 5, 25, -2, 7, 4, 25, -12, 10, 4, 4,\n 20, 22, -14, -13, -7, -10, -5, 15, -13, 8, -8, 5, -4,\n -7, -16, 24, 7, 13, -15, -14, -24, 18, -6, -10, -25, 3,\n 2, 10, 8, 8, -16, 6, -7, 4, 3, -7, 6, -11, -14,\n 6, 3, 14, 20, -7, 7, 8, -25, -13, 14, 11, -4, 7,\n -4, 3, -13, 13, -3, 6, 11, 15, -12, -20, 10, -21, 10,\n -13, -3, -9, 10, 11, -10, 4, 8, 16, -4, -7, 19, -10,\n -25, 12, 10, -12, -9, -1, -2, -4, -22, 4, -10, 6, 6,\n 10, -6, 4, -5, -27, 16, 10, -8, -16, -3, -23, 10, -7,\n -5, -10, -13, -13, -11, -6, 6, 9, -6, 12, -2, -20, -13,\n...\n -18, -19, -22, -10, 4, -5, -12, -8, -8, 3, -3, 13, 2,\n 10, 5, -10, 5, 10, 8, -19, -11, 8, -9, 5, -5, -8,\n 6, -12, -5, 9, -17, -10, 5, 20, -7, 6, 10, 3, -11,\n -12, -4, -3, -13, -6, -5, -12, -15, 9, -16, 18, 8, -4,\n 14, 16, 10, -14, 14, -4, -5, 12, 6, 8, -6, -15, 11,\n 23, 20, 5, 8, 10, 6, -19, 7, -5, -2, -5, 5, 10,\n 4, -9, -4, -7, -8, -7, -7, -12, -11, 8, 5, 11, -11,\n -10, 8, 2, 17, -12, -3, 5, -10, 14, -7, -6, 4, 2,\n 11, -12, 3, -8, -12, -4, -12, -13, -11, -4, 1, 10, -11,\n -3, 8, 15, 9, -1, 6, 10, 5, 17, 19, -19, 6, -4,\n 7, 26, 7, 7, 8, 13, 4, 12, -7, -16, 6, -12, -21,\n -4, -12, -2, 8, -23, -2, 3, -20, -13, -23, -27, -13, -11,\n -13, 24, -12, 41, 2, 21, -7, 13, 6, 9, -8, -11, -6,\n 8, 2, 6, 26, 15, 6, -12, -12, -19, 8, 19, -13, -5,\n 25, 11, -6, -10, -9, -52, 2, -17, 11, -3, 10, 4, 10,\n -10, -4, -12, 11, -7, -11, -5, -8, -5, -8, 9, -10, -5,\n -8, -15, 16, -7, -11, -21, 
10, -4, 4, -8, 6, 6, 16,\n 9, -26, -30, 3, -9, 8, 8, 1, 14, 6, -27, -2, 9,\n 3, -4, -9, 11, -27, -7, -8, 15, 11, -5, -7, 11, 5,\n 5, 23, 10, -3, 6, -8]]) reached_max_treedepth
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) step_size
(chain, draw)
float64
0.1358 0.1358 ... 0.2082 0.2082
array([[0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n 0.13576838, 0.13576838, 0.13576838, 0.13576838, 0.13576838,\n...\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 
0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575,\n 0.20823575, 0.20823575, 0.20823575, 0.20823575, 0.20823575]]) diverging
(chain, draw)
bool
False False False ... False False
array([[False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n...\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, 
False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False, False, False, False, False,\n False, False, False, False, False]]) energy_error
(chain, draw)
float64
-0.1432 0.234 ... -0.2511 -0.07324
array([[-1.43171886e-01, 2.34003937e-01, 1.95721692e-01,\n 4.30997254e-01, -5.22954272e-01, -8.35328564e-03,\n 6.06600029e-01, -7.79646664e-02, -1.11080472e-01,\n 9.94726207e-02, 2.36370065e+00, -2.50180608e+00,\n -2.44678385e-01, 2.00627917e-01, -3.56099578e-01,\n 4.38276794e-02, 1.28759099e+00, -5.65161940e-01,\n -5.88062825e-01, -2.97241168e-01, 4.62512441e-01,\n -3.22355115e-01, 2.24439872e-02, -6.81109018e-02,\n 4.69799670e-01, -4.91229778e-01, 5.29790609e-01,\n -4.79965452e-01, 2.64286998e-01, -3.41230109e-01,\n 3.94991553e-02, 2.76260658e-01, -5.26926482e-01,\n 7.41149137e-02, -1.05523708e-01, 4.87873723e-01,\n 2.54791498e-01, 6.33039045e-03, -9.07664177e-01,\n 3.57981267e-01, -3.02115564e-01, 5.97328319e-01,\n -9.98642171e-01, 5.39024635e-02, 9.76332054e-02,\n -4.43861842e-01, 8.64850727e-02, -9.21720715e-02,\n -6.78733082e-02, 1.87231522e-01, -2.30085590e-01,\n 1.51975412e-01, 1.42631377e-02, -3.58325574e-02,\n 2.15417390e-01, -1.36252085e-02, -4.90632741e-01,\n 2.42874365e-02, 4.20012190e-01, 4.25583352e-02,\n...\n 3.30821959e-01, 6.19959098e-02, 1.34809833e-01,\n -3.12569944e-01, 5.55035077e-02, -1.77507634e-02,\n 3.24912689e-01, 1.44530158e-01, -9.11075472e-02,\n 4.59041130e-02, -7.44948150e-01, -2.96440547e-02,\n -1.40461946e-03, -3.63274197e-02, 1.35621928e-01,\n 4.90866533e-01, -2.15235966e-01, -1.10427062e-01,\n -2.27660061e-01, 1.94590318e-01, -3.25748354e-01,\n 1.61799958e-01, -9.00420674e-02, 2.44640443e-01,\n 6.09937314e-01, -7.27847484e-01, -7.86080421e-02,\n -8.32434528e-01, 5.34612545e-01, -3.04019312e-01,\n 3.85714680e-01, -4.06519667e-03, -1.61477117e-01,\n 4.10483209e-01, -7.18101513e-02, -1.25036213e-01,\n -1.09358901e-01, 2.54732757e-01, 8.20763669e-02,\n 1.42161476e-02, 4.80628440e-01, -4.07231159e-01,\n -1.92271014e-01, 7.87427803e-01, -9.13417274e-01,\n 5.13799455e-02, -2.87267497e-01, 6.17522930e-01,\n -3.28612805e-01, -1.66335605e-01, -1.38270962e-02,\n -4.17334555e-01, 2.22846424e-01, -1.77716270e-01,\n 4.30988496e-01, 
8.38088808e-02, 1.96071324e-01,\n -2.51066044e-01, -7.32369520e-02]]) largest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n...\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, 
nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,\n nan, nan, nan, nan, nan, nan]]) tree_depth
(chain, draw)
int64
6 6 5 5 5 4 5 5 ... 5 5 4 5 4 4 5 4
array([[6, 6, 5, 5, 5, 4, 5, 5, 5, 5, 4, 5, 4, 6, 5, 4, 4, 4, 4, 4, 5, 5,\n 5, 5, 5, 4, 5, 5, 5, 5, 5, 5, 4, 3, 5, 5, 5, 4, 5, 4, 5, 5, 4, 6,\n 6, 5, 5, 5, 5, 4, 5, 5, 6, 5, 5, 5, 5, 5, 6, 5, 6, 4, 4, 5, 5, 6,\n 4, 5, 5, 4, 4, 4, 5, 4, 5, 5, 6, 5, 4, 5, 4, 3, 5, 5, 6, 6, 4, 4,\n 4, 4, 4, 4, 4, 4, 4, 4, 4, 6, 4, 4, 4, 4, 4, 4, 4, 4, 6, 5, 5, 4,\n 6, 4, 5, 6, 4, 6, 5, 6, 5, 5, 5, 5, 5, 5, 5, 4, 5, 4, 5, 4, 5, 5,\n 5, 3, 6, 3, 6, 4, 5, 5, 6, 5, 5, 6, 5, 4, 5, 4, 5, 3, 5, 4, 5, 5,\n 4, 5, 4, 5, 5, 5, 5, 5, 5, 5, 6, 4, 6, 6, 4, 5, 4, 5, 5, 5, 5, 5,\n 4, 4, 5, 6, 5, 5, 5, 5, 6, 5, 4, 5, 5, 5, 5, 5, 5, 4, 3, 3, 5, 6,\n 5, 6, 4, 4, 6, 4, 5, 6, 6, 5, 5, 4, 4, 5, 4, 4, 4, 5, 6, 4, 4, 6,\n 5, 6, 6, 5, 6, 5, 5, 5, 3, 5, 4, 5, 4, 5, 5, 5, 5, 4, 6, 5, 5, 4,\n 5, 4, 5, 4, 4, 4, 4, 4, 4, 5, 4, 4, 4, 5, 4, 4, 5, 5, 5, 4, 4, 6,\n 5, 5, 5, 4, 6, 3, 5, 5, 5, 4, 6, 4, 4, 4, 5, 4, 5, 5, 6, 4, 5, 4,\n 4, 4, 6, 6, 5, 4, 6, 4, 6, 6, 4, 5, 5, 5, 3, 6, 4, 4, 4, 4, 4, 4,\n 4, 4, 5, 4, 4, 5, 5, 6, 6, 4, 4, 5, 5, 5, 5, 5, 4, 5, 5, 5, 5, 4,\n 6, 5, 5, 5, 6, 6, 5, 4, 4, 4, 5, 5, 5, 5, 6, 4, 5, 5, 3, 6, 5, 5,\n 5, 6, 4, 4, 4, 4, 4, 5, 4, 6, 6, 5, 5, 5, 4, 4, 4, 5, 3, 4, 3, 6,\n 6, 5, 6, 5, 4, 4, 4, 4, 5, 5, 4, 4, 5, 4, 4, 4, 5, 4, 4, 5, 4, 4,\n 6, 4, 4, 4, 5, 4, 5, 5, 4, 3, 3, 5, 6, 4, 5, 5, 5, 4, 6, 5, 4, 5,\n 4, 5, 5, 5, 3, 5, 5, 4, 6, 4, 4, 5, 5, 6, 4, 5, 5, 5, 5, 5, 5, 5,\n...\n 6, 4, 3, 5, 3, 4, 5, 6, 4, 4, 4, 4, 4, 5, 5, 5, 4, 4, 5, 5, 6, 3,\n 4, 5, 4, 4, 4, 5, 5, 5, 5, 5, 5, 3, 3, 6, 4, 4, 4, 4, 4, 4, 4, 4,\n 4, 4, 5, 4, 4, 4, 3, 3, 5, 4, 5, 3, 5, 4, 6, 4, 5, 4, 5, 4, 5, 4,\n 5, 4, 4, 4, 5, 3, 5, 4, 4, 4, 4, 4, 5, 4, 5, 5, 5, 3, 5, 5, 4, 3,\n 4, 4, 5, 3, 5, 4, 4, 5, 4, 5, 4, 4, 4, 6, 5, 6, 7, 3, 5, 5, 5, 5,\n 6, 4, 4, 5, 6, 5, 3, 6, 4, 5, 5, 3, 4, 4, 5, 4, 6, 6, 4, 4, 4, 4,\n 5, 5, 5, 6, 4, 5, 4, 6, 6, 6, 4, 6, 4, 4, 5, 4, 5, 5, 5, 5, 6, 5,\n 5, 4, 5, 5, 4, 6, 5, 4, 6, 6, 4, 7, 6, 5, 4, 5, 4, 6, 4, 4, 5, 4,\n 4, 4, 4, 4, 5, 6, 5, 6, 4, 4, 4, 4, 4, 4, 4, 5, 5, 4, 5, 4, 4, 5,\n 4, 5, 5, 5, 5, 
4, 4, 4, 4, 3, 5, 4, 4, 6, 5, 5, 6, 4, 4, 4, 4, 4,\n 5, 4, 3, 4, 4, 5, 4, 5, 4, 5, 6, 5, 4, 6, 5, 4, 5, 6, 5, 4, 5, 5,\n 4, 4, 5, 5, 5, 6, 4, 4, 5, 4, 5, 4, 3, 3, 4, 6, 4, 4, 4, 4, 5, 4,\n 4, 4, 5, 6, 6, 3, 4, 5, 5, 4, 4, 5, 5, 5, 4, 5, 6, 5, 4, 5, 5, 5,\n 4, 4, 4, 4, 3, 6, 6, 4, 5, 4, 4, 4, 5, 4, 5, 5, 3, 3, 4, 4, 6, 5,\n 6, 3, 4, 4, 5, 4, 5, 5, 6, 4, 4, 4, 5, 4, 4, 5, 5, 5, 4, 4, 5, 4,\n 6, 5, 6, 6, 5, 5, 6, 4, 6, 4, 6, 5, 6, 5, 5, 4, 4, 5, 5, 6, 4, 4,\n 4, 5, 4, 5, 4, 6, 6, 5, 6, 4, 3, 6, 5, 4, 4, 4, 6, 4, 6, 4, 5, 5,\n 4, 5, 4, 4, 4, 4, 4, 4, 4, 4, 5, 4, 5, 4, 4, 4, 6, 6, 6, 4, 5, 4,\n 5, 4, 5, 5, 6, 6, 4, 5, 5, 4, 4, 4, 4, 4, 6, 4, 6, 4, 4, 5, 5, 5,\n 6, 5, 5, 5, 4, 5, 4, 5, 5, 5, 4, 5, 4, 4, 5, 4]]) max_energy_error
(chain, draw)
float64
-0.4221 0.4662 ... -0.549 -0.2357
array([[-0.42211931, 0.46619721, -0.39112177, 0.73599281, -0.52295427,\n 0.11961615, 0.68755697, 0.61165235, -0.65061597, 0.81571095,\n 2.43222479, -2.92062309, 0.42006312, 0.39356938, -0.49789392,\n -0.19585847, 1.67397416, -1.47891316, -0.8189544 , 0.6087752 ,\n 1.61578368, 0.86541241, 0.97756482, -0.51347573, 0.8651289 ,\n -0.72623909, 2.0700211 , -0.77698379, 0.57208073, 0.69176024,\n 0.8768792 , 1.18638865, 0.76744429, 0.48572921, 0.71665567,\n 0.82384198, -0.61706528, 0.95492848, -0.92036658, 3.07853043,\n -0.89295782, 1.67622399, 4.6500971 , 0.34276684, 0.88353547,\n 0.64838318, 2.89195714, -0.51094015, 0.82362059, 0.55856567,\n 1.0595535 , 0.45994474, 0.72375647, 0.97809853, -0.62683325,\n 0.51004694, -0.53536511, 0.47842063, 5.52983125, 0.33506817,\n 1.04754407, -0.37493332, 1.36882761, -1.30562767, -0.95436511,\n 1.57795467, -0.92345697, 0.51166725, 1.05589371, 2.66709675,\n -2.83768639, -2.88056444, 0.35173399, 0.50903852, 2.78457993,\n 0.69732823, 0.30888421, 0.42164677, 2.80275748, -2.53435381,\n -0.68637487, 0.44457786, 0.8722067 , -0.38738076, 0.37796232,\n 0.66166201, 0.78127078, 0.73774817, -0.44080571, 0.34958689,\n 0.5036544 , 0.20667885, -0.16731333, 0.33865096, 0.21482913,\n 0.4131586 , 1.70885819, -0.37555146, -0.2940948 , 0.42038804,\n...\n -0.62443717, 0.94722033, -1.00148085, 0.79407192, 1.08411315,\n 0.28038034, -0.91001364, 3.44740998, 0.5773898 , 0.50872312,\n -0.13803746, 0.18700911, 0.2764562 , 0.43660409, -0.52226424,\n -0.92539676, 0.48988517, 0.33034983, 0.82170517, 2.26339672,\n 0.80232237, 0.69513947, -1.06148554, 0.78214752, 0.22829013,\n -0.4022702 , 0.34088901, 0.4546666 , 0.25465691, 1.38055965,\n -0.49085976, -0.17524588, 0.47037321, 0.40117882, 1.77848661,\n 0.62377134, 0.80989148, 0.95944535, -0.83325663, 0.78164307,\n 0.44476728, 0.71534775, 1.24921082, -0.37344954, 0.42792139,\n 0.15385427, 0.20206775, 0.32491269, 1.35981258, -0.59895769,\n 0.93844995, -0.74494815, -0.61654583, 0.19115654, 0.28577228,\n 0.14422699, 
0.58325117, -0.78438233, 0.97854812, 1.05837946,\n 0.90207225, -0.50392737, 0.53849437, 0.22098099, 0.31264613,\n 1.14179426, -1.41514635, -0.63305106, -0.85565561, 0.55357049,\n 0.64636327, 0.47622491, 0.70323768, -0.3442301 , 0.41048321,\n 0.85440115, -0.59561788, -0.37499802, 0.47631855, 0.30399387,\n 0.29206803, 0.6002357 , -0.73535154, 0.62561777, 0.87836591,\n -1.06592396, 0.32305131, -0.2872675 , 0.74973086, 0.65105908,\n -0.46049302, 0.41489417, 0.59149456, 1.02172032, 0.83928798,\n 0.97618944, 0.33765199, 0.20683834, -0.54902292, -0.23570317]]) energy
(chain, draw)
float64
5.208e+03 5.213e+03 ... 5.219e+03
array([[5207.85538487, 5212.89653128, 5208.23200408, 5213.42689618,\n 5216.55053055, 5214.87573918, 5214.99680662, 5220.45639596,\n 5216.0774778 , 5218.58490286, 5225.99741874, 5220.86869983,\n 5216.77541122, 5223.65081168, 5221.19448322, 5212.13784653,\n 5222.67602963, 5216.78472148, 5221.39708485, 5214.14383623,\n 5217.31837724, 5214.90601755, 5215.18717093, 5215.24971345,\n 5219.78209302, 5209.72438071, 5212.79414497, 5212.60401737,\n 5214.80856728, 5215.3214985 , 5212.06997888, 5220.05807686,\n 5207.41599106, 5216.1608319 , 5223.14520622, 5216.67298042,\n 5211.8839921 , 5223.04579634, 5221.76596004, 5219.67263023,\n 5214.89378988, 5224.79699758, 5230.15085291, 5225.47732628,\n 5216.50839038, 5217.9384156 , 5211.81749442, 5213.7467318 ,\n 5219.42689629, 5218.59422577, 5215.36619967, 5212.67318807,\n 5215.83626623, 5214.5453407 , 5217.03022242, 5218.08462428,\n 5208.43626633, 5205.68891041, 5210.26363862, 5211.54618354,\n 5214.90365367, 5211.22971433, 5215.94590139, 5219.55221999,\n 5217.14791908, 5217.95842614, 5219.64072387, 5216.54536668,\n 5215.49168077, 5217.7554583 , 5222.98953317, 5221.97519772,\n 5217.10032129, 5212.20787238, 5219.64129066, 5216.59501868,\n 5214.93285843, 5224.00909391, 5221.87113531, 5216.75264338,\n...\n 5221.35386229, 5220.22704372, 5219.98701033, 5213.27908092,\n 5206.85598409, 5204.2539451 , 5205.34563112, 5205.48232182,\n 5206.1752632 , 5206.60720901, 5211.60878524, 5216.98889782,\n 5216.81887533, 5221.80025653, 5219.71450779, 5216.12547972,\n 5219.20208734, 5219.00874904, 5218.04814522, 5216.27828214,\n 5218.36174262, 5214.69220126, 5223.31730532, 5219.76580151,\n 5225.63520998, 5223.97755619, 5216.948141 , 5215.90741727,\n 5224.98157222, 5223.79901275, 5224.8700819 , 5218.04320708,\n 5213.23030437, 5207.4504749 , 5211.49254133, 5208.66442893,\n 5216.29235996, 5219.94520075, 5231.17843701, 5220.85587187,\n 5213.82755328, 5215.77112186, 5211.46436621, 5211.37418576,\n 5215.20314522, 5216.74391067, 5215.83659094, 5213.30246507,\n 
5209.88309634, 5207.74510196, 5212.40538059, 5216.07237164,\n 5221.13559398, 5213.38717702, 5218.67399448, 5229.34333312,\n 5218.60149121, 5214.5068901 , 5218.09209166, 5218.78198015,\n 5218.05956768, 5215.83078768, 5217.72467378, 5218.62725768,\n 5213.1837479 , 5216.53236734, 5223.16693089, 5219.03363283,\n 5218.62597886, 5220.7265057 , 5217.60288678, 5213.77771159,\n 5220.58761142, 5225.93067533, 5217.69454185, 5223.20766777,\n 5220.39869564, 5220.62621227, 5223.64084821, 5219.21075905]]) Attributes: (8)
created_at : 2026-01-08T05:23:59.298882+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 1005.5311143398285 tuning_steps : 500 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n observed_data \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 72kB\nDimensions: (__obs__: 3000, rt,response_extra_dim_0: 2)\nCoordinates:\n * __obs__ (__obs__) int64 24kB 0 1 2 3 ... 2997 2998 2999\n * rt,response_extra_dim_0 (rt,response_extra_dim_0) int64 16B 0 1\nData variables:\n rt,response (__obs__, rt,response_extra_dim_0) float64 48kB ...\nAttributes:\n created_at: 2026-01-08T05:23:59.304475+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions: __obs__ : 3000rt,response_extra_dim_0 : 2
Coordinates: (2)
Data variables: (1)
rt,response
(__obs__, rt,response_extra_dim_0)
float64
1.569 1.0 2.622 ... 1.0 4.417 1.0
array([[1.56945038, 1. ],\n [2.6223135 , 1. ],\n [2.72172785, 1. ],\n ...,\n [2.74452996, 1. ],\n [1.22708106, 1. ],\n [4.4169035 , 1. ]], shape=(3000, 2)) Attributes: (6)
created_at : 2026-01-08T05:23:59.304475+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n
\n "
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\nParallel sampling might not work with `jax` backend and the PyMC NUTS sampler on some platforms. Please consider using `nuts_numpyro` or `nuts_blackjax` sampler if that is a problem.\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nMultiprocess sampling (2 chains in 2 jobs)\nNUTS: [z, theta, a, t, v_Intercept, v_x, v_y, v_1|participant_id_sigma, v_1|participant_id_offset]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 0.136 31 1.00 draws/s 0:16:36 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 1000 0 0.208 15 1.03 draws/s 0:16:09 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 500 tune and 500 draw iterations (1_000 + 1_000 draws total) took 1006 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\nThe rhat statistic is larger than 1.01 for some parameters. This indicates problems during sampling. See https://arxiv.org/abs/1903.08008 for details\nThe effective sample size per chain is smaller than 100 for some parameters. A higher number is needed for reliable rhat and ess computation. See https://arxiv.org/abs/1903.08008 for details\n\r 0%| | 0/1000 [00:00, ?it/s]\r 0%|\u258f | 1/1000 [00:00<02:26, 6.82it/s]\r 5%|\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 52/1000 [00:00<00:03, 255.10it/s]\r 10%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 99/1000 [00:00<00:02, 341.78it/s]\r 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 141/1000 [00:00<00:02, 368.76it/s]\r 19%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 187/1000 [00:00<00:02, 398.14it/s]\r 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 233/1000 [00:00<00:01, 416.34it/s]\r 28%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 279/1000 [00:00<00:01, 427.36it/s]\r 32%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 324/1000 [00:00<00:01, 433.19it/s]\r 
37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 371/1000 [00:00<00:01, 442.77it/s]\r 42%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 417/1000 [00:01<00:01, 446.44it/s]\r 46%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 465/1000 [00:01<00:01, 454.22it/s]\r 51%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 511/1000 [00:01<00:01, 454.77it/s]\r 56%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 558/1000 
[00:01<00:00, 457.52it/s]\r 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 604/1000 [00:01<00:00, 447.07it/s]\r 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 649/1000 [00:01<00:00, 446.54it/s]\r 70%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 696/1000 [00:01<00:00, 451.57it/s]\r 
74%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 742/1000 [00:01<00:00, 449.06it/s]\r 79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 787/1000 [00:01<00:00, 448.26it/s]\r 83%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 833/1000 [00:01<00:00, 449.77it/s]\r 
88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 878/1000 [00:02<00:00, 445.27it/s]\r 92%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 923/1000 [00:02<00:00, 437.25it/s]\r 
97%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 968/1000 [00:02<00:00, 440.46it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 1000/1000 [00:02<00:00, 423.68it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "DpZg",
+ "code_hash": "9d932d1b67105a0ad5a0fb2d929bf12e",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "MXkk",
+ "code_hash": "9cb64fcf06758ae281400044ec722d2b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " rt response\n0 0.742448 -1.0\n1 1.626247 1.0\n2 1.289860 1.0\n3 1.338989 1.0\n4 1.291685 1.0\n.. ... ...\n495 0.952013 1.0\n496 0.906471 1.0\n497 1.467497 1.0\n498 0.814649 -1.0\n499 0.523454 1.0\n\n[500 rows x 2 columns]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "axEp",
+ "code_hash": "f76c639aeffbc264d28f5d13a6201903",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "mKKx",
+ "code_hash": "61ed45701803f4a369ee43735d5fafee",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "JWOG",
+ "code_hash": "6a7d078bda9c385a4c83dbc3d5852531",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "IWwC",
+ "code_hash": "a0621c22d5b26ed8e5d9a183391983a5",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [z, theta, t, v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.831 7 69.85 draws/s 0:00:28 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.741 7 33.15 draws/s 0:01:00 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 1_000 tune and 1_000 draw iterations (2_000 + 2_000 draws total) took 60 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/2000 [00:00, ?it/s]\r 7%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 147/2000 [00:00<00:01, 1468.82it/s]\r 15%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 305/2000 [00:00<00:01, 1532.22it/s]\r 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 459/2000 [00:00<00:01, 1437.87it/s]\r 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 604/2000 [00:00<00:00, 1407.18it/s]\r 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 746/2000 [00:00<00:00, 1370.60it/s]\r 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 884/2000 [00:00<00:00, 1344.91it/s]\r 
51%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1019/2000 [00:00<00:00, 1296.51it/s]\r 58%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1153/2000 [00:00<00:00, 1308.09it/s]\r 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1298/2000 [00:00<00:00, 1348.15it/s]\r 
72%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1436/2000 [00:01<00:00, 1354.67it/s]\r 79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1577/2000 [00:01<00:00, 1369.26it/s]\r 86%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1728/2000 [00:01<00:00, 1408.65it/s]\r 
94%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1879/2000 [00:01<00:00, 1437.63it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2000/2000 [00:01<00:00, 1394.76it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "HVQd",
+ "code_hash": "f585e30ace9f1afe32232c8f7bc7a999",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [z, theta, t, v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.788 7 27.23 draws/s 0:01:13 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.572 7 19.77 draws/s 0:01:41 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 1_000 tune and 1_000 draw iterations (2_000 + 2_000 draws total) took 101 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/2000 [00:00, ?it/s]\r 8%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 156/2000 [00:00<00:01, 1559.11it/s]\r 16%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 312/2000 [00:00<00:01, 1490.65it/s]\r 23%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 462/2000 [00:00<00:01, 1403.16it/s]\r 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 603/2000 [00:00<00:01, 1358.93it/s]\r 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 740/2000 [00:00<00:00, 1344.87it/s]\r 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 875/2000 [00:00<00:00, 1310.65it/s]\r 
51%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1011/2000 [00:00<00:00, 1324.90it/s]\r 57%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 1144/2000 [00:00<00:00, 1318.98it/s]\r 64%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1281/2000 [00:00<00:00, 1334.30it/s]\r 71%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1425/2000 
[00:01<00:00, 1363.17it/s]\r 79%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 1573/2000 [00:01<00:00, 1396.66it/s]\r 86%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1721/2000 [00:01<00:00, 1419.14it/s]\r 
93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1864/2000 [00:01<00:00, 1404.79it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2000/2000 [00:01<00:00, 1379.51it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "ipMD",
+ "code_hash": "4a659029736d249fb59814b6061d8fd6",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [t, z, v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.630 3 138.16 draws/s 0:00:14 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.801 1 70.60 draws/s 0:00:28 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 1_000 tune and 1_000 draw iterations (2_000 + 2_000 draws total) took 28 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n\r 0%| | 0/2000 [00:00, ?it/s]\r 7%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 146/2000 [00:00<00:01, 1456.82it/s]\r 15%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 295/2000 [00:00<00:01, 1473.18it/s]\r 22%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 443/2000 [00:00<00:01, 1468.65it/s]\r 30%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 591/2000 [00:00<00:00, 1468.17it/s]\r 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 746/2000 [00:00<00:00, 1496.86it/s]\r 45%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 899/2000 [00:00<00:00, 1505.84it/s]\r 
52%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1050/2000 [00:00<00:00, 1492.89it/s]\r 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 1200/2000 [00:00<00:00, 1488.30it/s]\r 68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1353/2000 [00:00<00:00, 1500.36it/s]\r 
76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1510/2000 [00:01<00:00, 1520.51it/s]\r 84%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1672/2000 [00:01<00:00, 1549.14it/s]\r 
92%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1840/2000 [00:01<00:00, 1586.69it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 2000/2000 [00:01<00:00, 1530.32it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "XAze",
+ "code_hash": "9b6d00b5aa56ea616b99261f94b7b7b5",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "eYRD",
+ "code_hash": "eaee07c5773d39b5dd230239df179969",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/vnd.marimo+mimebundle": "{\"image/png\": \"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAABVgAAAKeCAYAAABQ7HD2AAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjcsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvTLEjVAAAAAlwSFlzAAAewgAAHsIBbtB1PgAA2TNJREFUeJzs3XdYlfX/x/HXQZYCKuIeYJri1nJrKubMibkbqFjOyrJdlru+34aaDbVy4M5Vapl7b82Vewvm3gyVdf/+4Mv9A4EDHOGA9nxc17mu+z73Z7zPoOTN535/LIZhGAIAAAAAAAAApJtDVgcAAAAAAAAAAI8qEqwAAAAAAAAAYCMSrAAAAAAAAABgIxKsAAAAAAAAAGAjEqwAAAAAAAAAYCMSrAAAAAAAAABgIxKsAAAAAAAAAGAjEqwAAAAAAAAAYCMSrAAAAAAAAABgIxKsAAAAAAAAAGAjEqwAAAAAAAAAYCMSrAAAAAAAAABgIxKsAAAAAAAAAGAjEqwAAAAAAAAAYCMSrAAAAAAAAABgIxKsAAAAAAAAAGAjEqwAAAAAAAAAYCMSrAAAAAAAAABgIxKsAAAAAAAAAGAjEqwAAAAAMt2wYcNksVhksVg0bdq0TJ/v7Nmz5nx+fn6ZPh8ebevXrze/Lz179szqcAAAjxgSrAAAAEAWKlmypJnYsVgs8vT01L1799Lc/8svv0zU314JTAAAAMQhwQoAAABkI7du3dKiRYvS3H7q1KmZGA0AAABSQ4IVAAAAyCYsFouktCdNt23bpiNHjkiSHBz4pz0AAEBW4F9hAAAAQDbx7LPPSpLWrl2r4ODgVNtPmTJFkpQjRw41atQoU2MDHmd+fn4yDEOGYVBiAwCQbiRYAQAAgGwiMDBQkhQbG5tqkiciIkK//PKLJKlFixYqWrRoZocHAACAZJBgBQAAALKJWrVqqWLFipKkadOmyTCMFNvOnz9foaGhkqRevXqle679+/dr0KBBqly5sjw9PeXq6qrixYurdevW+vHHHxUZGZnmsS5cuKD3339fFSpUkLu7uzw9PVWtWjUNHz5cly9fTndsknT69GkNGTJEtWvXVqFCheTs7KwCBQqoXr16GjVqlK5fv27TuBlhzZo16t+/vypVqiQvLy85OTnJ09NTNWrU0Ouvv65Vq1ZZ/ewkadOmTerTp4/KlSun3LlzK2fOnPLx8VGnTp00Z84cxcbGWu2f3K73YWFh+vrrr1W7dm3lz59f7u7uqlq1qkaNGqU7d+4k6m8Yhn7//Xe1bdtWPj4+cnFxUfHixdWjRw8dP3483XPfvn1bX375pWrVqqX8+fMrZ86cKlOmjF577bVUx4u3f/9+ff7552rdurWeeOIJubm5ycXFRYULF5afn59Gjx6ta9eupTrOsGHDkmz4dvbsWX388ceqVq2avLy8ZLFY5O/vb/U1JScmJkazZs3S888/b8bo6OioPHnyqEKFCurSpYt++uknnTt3LtU4f//9d7388ssqXbq03Nzc5O7urtKlS+vll1/WH3/8kWr/adOmmTEPGzZMknTnzh19/fXXqlWrlry8vOTq6qpSpUrp1Vdf1bFjx1IdEwBgIwMAAABAlvHx8TEkGZKMEydOGF9//bV5vmbNmhT7NWrUyJBk5M+f37h//77x4osvmv2mTp2aYr+oqChj4MCBhoODg9k+uccTTzxh7N69O9X4Fy9ebOTJkyfFcQoVKmRs3LjRGDp0aJrii4mJMd5//33DycnJany5c+c2Fi1alOI4Z86cMds2atQo1deRFufOnTMaNGhgNa74x/jx45MdIywszOjUqVOq/atWrWqcOnUqxVjWrVtntu3Ro4dx7Ngxw9fXN8XxfH19jUuXLhmGYRihoaFGy5YtU2zr6upqrF69Os1zHzx40ChdurTV8X766Ser
7+1LL72UpvfV3d3dmD9/vtWxHvyuzZkzx3B3d08yVvv27VN8Tck5f/68Ua1atTTFWb169RTju3z5svnza+3x7LPPGlevXk1xnKlTp5pthw4dauzbt8/q5+Ds7GwsWLDA6nsHALCNY9KUKwAAAICs8vLLL+uDDz5QVFSUpk6datZlTejUqVPauHGjJOnFF1+Us7Nzmsfv3r27FixYYJ7XqFFDTZo0kbu7u44fP67Fixfrzp07OnPmjPz8/LRu3TrVqFEj2bHWrl2rTp06KSoqSpKUP39++fv7q2TJkrpx44Z+//13HT9+XP7+/mrfvn2qsRmGoS5dumjhwoWS4jbu8vPzU82aNZU3b15dv35da9as0d69e3Xnzh116tRJCxcuTLQSMbMcPXpUjRo10pUrV8zY6tevr9q1aytfvnwKCwvT4cOHtWHDBt28eVMxMTFJxoiMjFSzZs20bds28zk/Pz/Vr19fzs7OOnjwoJYuXap79+5p//79qlevnnbs2CEfHx+rsd2+fVtt2rTRiRMnVLFiRbVo0UJeXl46deqUfvnlF4WHh+vYsWPq3r27Vq9erY4dO2rlypUqXLiw2rVrJx8fH127dk0LFixQSEiI7t27p27duunYsWPKly9fqnO3a9dOp0+flre3t9q1a6ciRYrowoULWrx4sc6fP6979+6pT58+cnNzU/fu3ZMd5+rVq5LivkN169aVr6+vPD09FRMTo+DgYK1atUrnzp1TWFiYunbtqlWrViX7s/GgLVu2aNq0aYqOjlbNmjXl5+enPHnyKCQkJNW+CcXGxsrf31/79u2TJHl4eKhly5YqV66c3NzcFB4errNnz2r37t3mxnPJuXnzpurXr6+TJ09KkhwdHdWiRQtVr15dkvTXX39pxYoVio6O1tq1a/XMM89ox44dypMnj9X4zp8/r5YtW+rSpUuqVKmSmjVrpgIFCuiff/7RokWLdPHiRUVGRurll19WtWrVVLp06XS9fgBAKrI6wwsAAAD8mz24gtUwDKNDhw6GJCNnzpzG7du3k/T5+OOPzT779u0zDMNI0wrWiRMnmm2cnJyMmTNnJmlz6dKlRKs0y5QpY9y9ezdJu7CwMMPb29ts16pVK+PmzZuJ2kRHRxuffPKJIcmwWCypxvf555+bbWrWrGkcO3Ys2XZz5swxnJ2dDUlG3rx5jevXrydpk5ErWCMiIoxy5cqZ41WsWNHYv39/sm2joqKMRYsWGX/++WeSax988IE5hoeHh7Fq1aokbU6ePGmUL1/ebNegQYNk50m44jL+/R07dqwRGxubqN3Ro0eNfPnyme3ivycBAQFGeHh4orahoaFGzZo1zbajR49Ode74z7VPnz7GvXv3ErW7e/euERgYaLbNmzevceHChWTH/Oqrr4w1a9YYMTExyV6PiYkxxo4da668fvLJJ1Nsm3AFq/63gja1lZuprWBds2aNeb1GjRrGtWvXUhzr1KlTKa7Y7datmzlO0aJFjT179iRp89dffxmFCxc227300kvJjpVwBaskw9HR0fjxxx+TtLtz545Rr149s12fPn1SjB0AYBsSrAAAAEAWSi7BumTJEvO5SZMmJWofExNjFC9e3JBkPP300+bzqSVYo6KizH6SjG+++SbFmG7fvm0UK1bMbJtc0ub77783rz/55JPJJmGTiy2l+K5fv264ubkZUlx5guQSywmNHz/eHG/EiBFJrmdkgnXcuHHmWCVKlEg2oZuaGzduGLly5TLHWbx4cYptz507Z74XkoyVK1cmafNggvWdd95JcbzPPvssUdu6deummJzcvHlzokRich6cu3HjxkkSu/FiYmISJew//PDDFONMizfeeMMcK6UyBg8mWFMrT/Dga0ouwfrVV1+Z1229zf7IkSNmQjpHjhzGX3/9lWLbnTt3mslki8WS7B8bHkywfvnllymOd/jwYbNd/vz5bYofAJAyNrkCAAAAspnnnntOhQsXliRNmTIl0bWVK1fq/Pnz
ktK3udXatWvNfiVKlNBrr72WYtvcuXNryJAh5nlQUFCSNjNnzjSPP/nkE7m6uqY43ujRo2WxWKzGFxQUpPDwcHO83LlzW23ft29fubu7S5KWLFlite3DmjBhgnn8xRdfpHrbfHLmz5+viIgISVL9+vXVrl27FNt6e3sn+nySe/8TcnZ21gcffJDi9VatWiU6//jjj+XgkPyvgvXr15enp6ck6e+//0621MGDRo0aleLn6+DgoJEjR5rnqb2W1CQsMRBfJsOaEiVKKDAw8KHmlJTofYj/HNNr+vTp5uZn3bp109NPP51i25o1a6pz586S4kpnTJ8+3erYBQoU0BtvvJHi9fLly5sb6F27ds38bwEAIGOQYAUAAACyGUdHRwUEBEiSduzYocOHD5vX4hOuLi4uevHFF9M85pYtW8zj559/PsUEW7wuXbqYx7t379b9+/fN8/v37+uvv/6SpCS7sSfHx8cnxTqu8dauXWset2jRwmpbKS6pWL58eUnS3r1705QItMU///xj7r7u7u6ujh072jROwve/U6dOqbZP+P5v3rzZatvq1avLy8srxeulSpUyjx0dHVOtXfrEE09Iivucb926ZbVtoUKFVK9ePattGjZsaMZ34cIFnTt3zmr7a9euafXq1Zo6darGjx+vr776ynwkTKbHfy7WPPfcc6l+19OiatWq5vEnn3xifv/TIzO/A02aNEm1FnPZsmXN4/hawgCAjMEmVwAAAEA2FBgYqC+++EKSNHXqVH355Ze6ceOGmWDy9/c3VxqmxYkTJ8zjp556KtX2+fLlk4+Pj86dO6f79+8rODhYZcqUkSSdPXtWkZGRkuKScamtNpXiElS7du1K8fr+/fvN42LFiqU6XkIxMTG6efOm8ufPn65+aZFww6KnnnpKTk5ONo2T3ve/cuXKcnR0VHR0tM6dO6eoqKgU5/b29rY6VvxKX0ny8vJSzpw509w+PDzcavI2YeIxJRaLRZUrV9b69eslxSVGk9u4a/Xq1Ro5cqQ2bdpkrvS0JrXkrySVK1cu1TZp0axZMz311FPau3evzp07pxo1aqhKlSpq1qyZ6tWrpzp16qho0aJWx0jvdyDhCteEfZOT2kZoUtzGXPHCwsJSbQ8ASDtWsAIAAADZkK+vr7kycMaMGYqOjtasWbPMlaTpKQ8gxe1eHi+ticiE7W7cuJHsWNaSbymNlZzr16+naZyUxJcXyGgJ4ypYsKDN46T3/Xdyckq0c3zC9/9B1sozSEp0+35qbR9sHxsba7WtLZ9/wvci3vDhw9WsWTNt3LgxTclVSbp3716qbRK+hw/DwcFBy5YtU9OmTc3nDhw4oK+//lodO3ZUsWLFVL58eQ0bNkxXr15Ndoz0fgdS+vlLTkZ/rgCA9GEFKwAAAJBNBQYGauvWrbp8+bKWLVtmlgcoXry4mjVrlsXRZazo6Gjz+PPPP5ejY/p+VUnPal5kL6tXr9awYcPM865du6pz586qXLmyChcurJw5c5qrd8+cOZOo5EFqMqI8QLzChQtr1apV2rZtm+bNm6cNGzbowIEDZnmKo0ePavjw4RozZoyCgoLUoUOHDJsbAJC9kWAFAAAAsqkuXbpo0KBBCg8P14cffmjWYu3Ro0e6E0cJE5BpXS167do18zjhxk4PO1ZyvLy8dPHiRUlxieWHWS2akRKu0HyYupXpfc+ioqJ0+/Zt89yWjbXswZbP/8Fk+Pjx483jMWPG6K233kpxnITvSVapW7eu6tatK0m6c+eOtm7dquXLl2v27Nm6evWqQkND1bVrV+3du9fcWEqKe93x3/Hr16/Lzc3N6jwp/fwBALIfSgQAAAAA2ZSHh4e5GU7Cja7SWx5Aklk/VYrbFCo1N27cMDcjcnFxSVTjsWTJkuaGOmfOnNGdO3dSHS9hjdXkJKyVmVpbe0qYINu7d6+ioqJsGie97//BgwfNVb0lS5a0ufZrZkvLZ2UYhv7++2/z3NfXN9H17du3S4q7zX3g
wIFWxzp48KANUWae3Llzq2XLlho3bpxOnz6t2rVrS4pLkP/000+J2qb3O5CwTcINqgAA2Q8JVgAAACAbCwwMTHTesGFDlS5dOt3j1K9f3zz+9ddfU63BuGDBAvO4Ro0aiXYod3FxMTfgMQxDixcvtjpWcHCwdu/ebbVNwpIHc+fOtdrWnooUKWImf8PCwrRw4UKbxkn4/id8b1Myf/78ZPtmN5cvX9a2bdusttm0aZO50rVo0aJJNmSKr03q7u6e6HuWnLS8d1nF3d090erbY8eOJbr+uH4HAAAkWAEAAIBsrWHDhnr33Xc1cOBADRw4UJ9++qlN4zz77LMqUaKEJOncuXOaMGFCim1DQ0M1atQo87xnz55J2rz88svm8ahRo8zNt5LzySefpLpxUWBgoLm7/fTp07Vp0yar7RNK66ZIturfv795/N5776W64VByOnfurFy5ckmSNm/erD/++CPFtufPn9d3331nnif3/mcn1j5fwzASfWcDAgKStIkvw3Dt2jUFBwenOM/atWu1ZMmSh4zWfuK/z/ECAgLMjabmzp1rdfXvnj17NG/ePElxm1P16NEj8wIFADw0EqwAAABANvfFF1/ou+++03fffacmTZrYNIajo6OGDBling8ePDjZlaJXrlxR27ZtFRISIinutuaXXnopSbuAgAAzYXv8+HF17tw5SamA2NhYDR8+XNOnT0+0g3lyChUqpI8//lhS3IZXrVu31owZM1JcaRsbG6stW7bo5ZdfTlTDMzO8+uqr5irWkJAQNWzYUAcOHEi2bVRUlBYtWqTly5cnet7T01ODBg0yz1988UWtXbs2Sf/Tp0+rZcuWCg0NlSQ1aNAg0c712Y3FYtGaNWs0cOBARUZGJrp2//599e3bVxs2bJAk5cmTR6+//nqSMRo2bGge9+7d23ztCf355596/vnnMzj6tHvzzTf1zjvv6NChQym2+eeffxL9YSLh65LiymB069ZNUtx3vG3btskmWffu3au2bduam2e9+OKLicoLAACyHza5AgAAAP4l+vTpo1WrVmnBggWKjIxU9+7dNXbsWD377LNyd3fXiRMn9Ntvv5kbCbm7u2v27NlydXVNMpa7u7smT56sVq1aKTo6WkuXLtWTTz6pDh06yMfHRzdu3NAff/yho0ePytPTU/7+/po6darV+D766CMdOXJEs2bNUmhoqAICAvTJJ5+oSZMm8vHxkZOTk27evKmjR49q586dunz5siSpatWqGf9mJZAzZ04tXLhQjRo10rVr13To0CE99dRTeuaZZ1SnTh15enoqNDRUR44c0fr163Xz5k2NHTtWLVu2TDTOsGHDtH79em3btk23b99W06ZN5efnp/r168vZ2VkHDx7U0qVLdffuXUlxSecZM2Zk6mt7WO3bt9f+/fs1YcIELVu2TO3bt1ehQoV08eJFLV682EzUWywWfffddypatGiSMd59910tWrRIMTExWr16tUqVKiV/f3+VLFlSd+7c0YYNG7Rjxw5J0qeffqoRI0bY9TVK0q1btxQUFKSvv/5apUuXVq1atfTEE0/Iw8NDN27c0NGjR7V8+XKzRm/p0qX1yiuvJBnn+++/165du3Ty5EmFhISoRo0aatmypapXry5J+uuvv7R8+XKz/q6vr2+i1cwAgOyJBCsAAADwLzJnzhwVLFhQEyZMkGEY2rlzp3bu3JmkXcmSJTV//nzVqFEjxbGaNWumefPmqWfPnrpz546uXr2qH3/8MVGbAgUKaMGCBcmu1nyQxWLRjBkzVKVKFY0cOVJhYWE6d+6cpkyZkmIfd3d3lSxZMtWxH1aFChW0Y8cOvfDCC9qxY4diY2O1ceNGbdy4Mdn2yW1K5ezsrFWrVqlHjx5auHChDMPQunXrtG7duiRtq1atqkWLFiWpV5rd5MmTR0uWLFG7du105syZZFcTu7i46Jtvvkl2JbQk1axZUz/++KP69eunqKgoXbt2TT///HOiNk5OTvrss8/U
qVOnLEmwJqwNe+rUKZ06dSrFtjVr1tSCBQvk7u6e5Jqnp6e2bNmizp07a+PGjYqOjtbvv/+u33//PUnbxo0ba968ecqTJ0/GvAgAQKYhwQoAAAD8izg6Our777/Xq6++qsmTJ2vt2rX6559/dPfuXeXPn19Vq1ZV+/bt1bNnT7m4uKQ6XocOHVSrVi2NGzdOv//+u4KDg+Xk5CRvb2+1b99eAwYMUJEiRdKUYJXikqzvvfeeevfurWnTpmnNmjU6ePCgrl27pujoaOXJk0elSpVStWrV1KxZMz333HNyc3N72LclTUqVKqXt27frjz/+0IIFC7RlyxZdunRJERERyp07t5588knVrVtXzz//vBo1apTsGG5ublqwYIE2btyo6dOna+PGjbp48aKioqJUsGBB1axZUx07dlS3bt3k4PBoVHSrVKmS9uzZo4kTJ2rhwoU6ffq0IiIiVKxYMTVv3lyDBg2Sr6+v1TECAwNVs2ZNjR07VuvWrdOFCxeUM2dOFS1aVE2bNtUrr7yiKlWq6OzZs/Z5UQ+YOHGievToobVr12r79u06duyYLl++rIiICOXKlUvFixdX9erV1blzZ7Vr185qSYyCBQtqw4YNWrp0qebOnatt27aZq7ELFSqkevXqqVu3bmrTpo29Xh4A4CFZjMyuCA8AAAAAeGysX79ejRs3liT16NFD06ZNy9qAAADIYo/Gn0QBAAAAAAAAIBsiwQoAAAAAAAAANiLBCgAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANiLBCgAAAAAAAAA2shiGYWR1EAAAAAAAAADwKGIFKwAAAAAAAADYiAQrAAAAAAAAANiIBCsAAAAAAAAA2IgEKwAAAAAAAADYiAQrAAAA8Jg6e/asLBaLLBaL/Pz8MmRMPz8/c8yzZ89myJjZcc7sYtiwYeZrnzZtWlaHY8ZSsmTJrA4FAIBswzGrAwAAAAAAICMMGzZMkpQ3b169+eabqbZfv3691q9fL0ny9/dXtWrVMi02AMDjiwQrAAAAAOCxMHz4cEmSj49PmhOs8X1KlixJghUAYBNKBAAAAAAAAACAjVjBCgAAACDN4m+nhn0MGzbMvO0dAABkT6xgBQAAAAAAAAAbkWAFAAAA/kUMw9Ds2bPVokULFS1aVC4uLipSpIg6dOiglStXptrfz8/P3En+7NmzVtveuHFDQ4cOVbVq1ZQ7d27lzp1bFStW1HvvvWf2nTZtmjleelZqHjhwQP369VOZMmWUK1cu5c2bV7Vq1dKXX36pu3fvpnmc+/fv6+eff5a/v798fHyUK1cueXh4yNfXV3369NGOHTtSHaNkyZLma4i3fPlyde/eXaVLl1auXLlksVj022+/pTmueMOGDTPHnjZtWortbt68qa+//lpNmjRRkSJF5OLiImdnZ3l5eenpp59W3759tWDBAt26dSvdMVgTGxurWbNmqUWLFipWrJhcXV1VvHhxdenSRWvWrEnXWJcvX9bo0aPVqFEj87vp5eWl6tWr68MPP9T58+eT7Xf27Nkk7/+5c+fM5xI+/Pz8JEk9e/aUxWIx669KUq9evZLtY23V9ubNmzVgwABVrFhR+fLlk4uLi4oVK6bWrVtr8uTJioqKsvqak/t8z549q48//ljVqlWTl5eXLBaL/P390/QeAgCyiAEAAADgsXTmzBlDkiHJaNSokXH9+nWjWbNm5nPJPQYPHmx1zEaNGpltz5w5k2K7DRs2GAULFkxxHg8PD2PRokXG1KlTzeeGDh2apjnHjBljODk5pTh2xYoVjYsXL6b6/qxevdooUaKE1fdDkhEYGGhERkamOI6Pj4/Z9v79+8aLL76Y7Di//vprqjE9aOjQoWb/qVOnJttm/fr1Rv78+VN9HZKMt99+O90xJBQ/jo+Pj3Hr1i2jefPmVufr06ePERMTk+q4Y8eONdzc3KyO5erqanz//fdJ+ib8nqf2aNSokWEYhtGjR48091m3bl2SOa9f
v260adMm1b5ly5Y1jhw5kuLrfvDznTNnjuHu7p5knPbt26f1IwIAZAFqsAIAAAD/AtHR0erUqZPWrVunwoULq23btvLx8VFoaKj+/PNPHThwQJI0ZswYVa9eXS+88ILNc+3bt0+tW7dWWFiYJClPnjxq3769ypQpo7CwMK1bt047d+5U9+7d1a9fv3SNPXnyZI0aNUqOjo5q06aNnn76aTk5OWnfvn367bffFBMTo0OHDqlHjx5asWJFiuMsXLhQ3bt3N1cYPvnkk2ratKmKFy+u6Oho7d+/X3/88YciIyM1ZcoU3bp1SwsXLkw1vkGDBmnWrFnKmTOnWrdurYoVKyo2Nlb79++Xk5NTul5rWpw/f15t27ZVaGioJKl48eJq0aKFfHx85OTkpNu3b+v48ePatm2bLl68mKFz9+7dWytXrlTu3LnVvn17lS1bVqGhoVq1apX27t0rSfrxxx9lsVg0ceLEFMcZPHiwxo4da57XrVtXdevWVf78+RUaGqrNmzdr06ZNunfvngYOHKjo6Gi98cYbZvt8+fLpyy+/lCS9++67kiRPT0999NFHSeYqUaKEJKlbt26qVKmSVq5cqVWrVkmSunbtqho1aiTpU7p06UTnV69e1TPPPKPjx49Lktzd3dWyZUtVqFBBrq6uCg4O1h9//KGQkBAdP35c9evX119//aWSJUtafT+3bNmiadOmKTo6WjVr1pSfn5/y5MmjkJAQq/0AANlAVmd4AQAAAGSO5Fb29evXz4iIiEjULjY21hg0aJDZxtfXN8UxU1vBGh0dbVSrVs1sU79+fePSpUtJ2s2YMcNwdHQ0LBZLulawysqqwM2bNxs5c+Y0223dujXZ8Y4fP26uEnRzczNmzZplxMbGJml3+vRpo2rVquZ4kydPTna8hCtYJRnVq1c3goODk22bXqmtYP3kk0/M66+++qoRFRWV7DixsbHG1q1bjaVLlz5UPPFzxX9uderUSXa18IQJExJ9tsuWLUt2vDlz5phtnnzySWPnzp3Jtlu9erXh6elpSDKcnJxSXBUaP5aPj0+aXk9aVgg/qEWLFmafgIAA49atW0naREZGGm+//bbZrkGDBqnOr/+t0l2wYEGa4gAAZB/UYAUAAAD+JVq3bq0JEyYoZ86ciZ63WCz68ssvzdV9x44d06FDh2yaY9myZdq3b58kqUCBAlq6dKkKFSqUpN1LL72kkSNHyjCMdI3v7u6u5cuXq1y5ckmu1a9fP9HKxkWLFiU7xqeffmqurp07d65eeOGFRPU74z3xxBNatmyZ3N3dJUmff/55qvHmy5dPy5YtM9/LzBa/UlSKi8/RMfmbFC0Wi+rWras2bdpkyLyGYSh//vxaunSpChcunOR6v379Eq0gHTlyZJI20dHRev/99yVJefPm1bp161SzZs1k52vSpImCgoIkSVFRUeaKVXtbsWKFuTK6U6dOCgoKUp48eZK0c3Jy0ldffaXnn39ekrRp0yZt2rQp1fG//fZbdezYMWODBgBkOhKsAAAAwL/EqFGjUrzm5OSkdu3amecJE3fpMXPmTPP4rbfekqenZ4pt33rrLeXNmzdd4/fu3VtPPPFEitc7depkHif3Gq5evar58+dLkho2bJhqwrFo0aLq3r27JOnkyZM6fPiw1fb9+/dXwYIFrbbJSDExMeZxRESE3eaV4j6//Pnzp3j9ww8/lJubmyRp27ZtOnHiRKLrS5cuVXBwsCTpjTfeUPHixa3O17ZtW5UpU8bsmxV++OEHSXEJ6//+97+ptn/77bfN4yVLllhtW6JECQUGBj5cgACALEGCFQAAAPgXKFy4sKpVq2a1TdmyZc3jK1eu2DTPtm3bzOP27dtbbevi4qKWLVuma/znnnvO6vXUXsPGjRvNpGSLFi3SNGfC92337t1W27Zu3TpNY2aUqlWrmse9e/fWhQsX7DZ3hw4drF53c3NL9B5v3bo10fW1a9eax+n9LK5evapz586lMdKMERsbqw0b
NkiKq9lbqlSpVPuk57vz3HPPycGBX9EB4FHEf70BAACAfwEfH59U23h4eJjH8bfQp8f9+/fNDXmcnZ3l6+ubap/KlSuna47UXkdqr2H//v3m8ccffyyLxZLqY+DAgWafq1evWp0/udIFmalfv37KnTu3JGnVqlXy8fHRs88+q5EjR2rVqlXm5lcZzdXVNU2fb5UqVczjY8eOJbqW8LOoX79+mj6L+NXHUuqfRUY7e/asbt++LUk6ceJEmuKNX8Gblnjt/d0BAGQcEqwAAADAv4Crq2uqbRLWIY2NjU33HDdv3jSP8+TJoxw5cqTax8vLK11zpPY6UnsN169fT9d8DwoPD7d6Pbl6nJnJx8dHK1asMHe6j46O1rp16/Tpp5+qefPm8vT0VIMGDfTTTz8pMjIyw+b19PRM02rLhCUEEn4/pMz/LDLa4/bdAQBknOQroAMAAADAYyg6Oto8fv7551W3bt109a9Xr57V61lxi3edOnV09OhRLV26VEuWLNHGjRt1+vRpSXE1Wjdv3qzNmzfryy+/1OLFi1W+fHm7x5ichJ/Fu+++m+7atWm5RT8jJYzX29tbr7/+err6p5ZApTwAADy6SLACAAAAyBAJN6y6ffu2YmJiUl3F+rCrAtMr4YrZ2rVr65133rHr/JnF0dFRHTp0MOuiXrhwQRs3btTSpUu1cOFC3b9/XydOnFCbNm10+PBhubi4PNR8N2/eVGxsbKpJwWvXrpnHD254lvCz6NSpk2rVqvVQMWW2hPG6ubk9Nt8dAMDD409kAAAAADKEq6uruRN8ZGRkkpqbyfn7778zO6xEEta5TFgD9HFTtGhRdevWTbNmzdL+/fuVL18+SdLp06f1559/PvT49+7d0/Hjx1Ntd+DAAfP4wZqtj9pn4ePjo5w5c0qSTp48qYiIiCyOCACQXZBgBQAAAJBhEt5Cv3jxYqtt79+/rxUrVmR2SIk0bdrUrNO6bNmyTNsEKjvx9fVVly5dzPO0JL7T4tdff7V6PTw8XCtXrjTPHyyv0KxZM/N47ty5GRKTo2PcTZoxMTFpau/k5GQep9bHxcVFDRo0kCRFRUVp4cKFNkYJAHjckGAFAAAAkGFefPFF83jcuHFJNjZKKLXrmaFo0aLy9/eXJN26dUvvvvtumvsahpFJUdlX/CrMhzV27FjduHEjxev//e9/FRYWJimuTmyZMmUSXe/QoYOKFi0qSVq7dm26kqwpfRbxdU7T+r1KWBfV2muJN3DgQPP4448/1tWrV9M0j/T4fH8AAEmRYAUAAACQYVq3bq2qVatKkq5cuaJ27drpypUrSdrNmjVLQ4YMMVeT2tNnn30md3d3SdKkSZMUGBiYqFbog0JCQvSf//xHLVq0sFeIafbss89q7NixunTpUoptNm/erJkzZ5rnDRs2fOh5LRaLrl69qrZt2+ry5ctJrv/8888aPXq0eT5kyJAkbVxdXfXFF1+Y5z169NCYMWMUGRmZ4rx79+7Va6+9pvfffz/Z6/FlCMLDw7Vz585UX0fCsgXr1q1LtX27du3MlbchISF65plntG3bthTb3717V4sWLVKjRo20Z8+eVMcHADya2OQKAAAAQIbJkSOHpk6dqmeeeUYRERHavHmzypYtK39/f5UpU0ZhYWFau3atdu7cKRcXFw0YMEDjx4+XJLslW8uVK6fZs2erc+fOun//vqZOnaq5c+eqSZMmqlKlinLnzq2wsDCFhIRoz549Zp3YihUr2iW+9Dh9+rQGDx6sd955R0899ZSeeuopFStWTM7Ozrpy5Yp27Nih7du3m+27du2qatWqPfS83t7eql69uhYtWqSyZcuqQ4cO5ue7cuXKRMnEV155Ra1bt052nBdffFGHDh3S559/rsjISL399tv64osv1LRpU5UuXVo5c+bU7du3dfLkSe3cuVPBwcGSEq8kTah169baunWrJKlt27Z68cUX5ePjY262VqxYMXMjMEl65plnlCdPHt2+fVt//vmn
WrZsqUaNGsnDw8Ns06FDBxUrVsw8nzt3rho2bKhDhw7p+PHjqlevnp5++mnVr19fhQsXVkxMjK5du6a///5bO3fuVHh4uCRWsALA44wEKwAAAIAM9dRTT+mPP/5Q586dde3aNd2+fVtBQUGJ2ri7uysoKCjRrdwZdet6WrRt21abNm1Sjx49dOTIEd29e1e///67fv/99xT7VKpUyW7xpZWzs7MkKTY2Vn/99Zf++uuvFNsGBAToxx9/zLC5p0yZojt37mj16tVJPt94gYGBmjhxotVxPvvsM5UrV06DBw/W9evXdfnyZc2aNSvF9s7OzknKDcR7/fXXNWPGDB09elRXrlzR2LFjE11v1KhRogRrzpw59cUXX6hfv34yDEMrVqxIUhe4UqVKiRKs+fLl0/bt2zVw4EDNnDlTsbGx2rNnj9UVqsWKFZOnp6fV9wEA8OgiwQoAAAAgw/n5+eno0aMaN26clixZotOnT0uSSpQooZYtW2rgwIEqXbq0xowZY/ZJWA/THmrWrKmDBw9qyZIlWrJkibZt26ZLly4pNDRUuXLlUrFixVShQgU1bNhQbdq0UenSpe0aX1rs27dPa9as0fr16/XXX3/p1KlTunbtmqKjo+Xh4aFSpUqpXr16CggIUI0aNTJ07jx58mjFihWaMWOGZs6cqUOHDun69evKnz+/6tatq379+qlp06ZpGisgIEAdO3bUjBkztHLlSu3du1dXr17V/fv3lTt3bvn4+KhKlSpq0qSJWrdurXz58iU7joeHh3bs2KFvv/1Wf/zxh44dO6Y7d+4oOjo6xbn79OkjX19fTZo0STt27NClS5cUERFhNd74PxB89NFHCgoK0oYNG3Tq1CnduHFDOXLkkJeXl8qWLatatWqpRYsWatSokRwcqNAHAI8ri8F9CgAAAACyyKuvvqqff/5ZkrR8+fJsWecUAADAGhKsAAAAALJETEyMSpYsqfPnz0uK2xSrQIECWRwVAABA+nCPAgAAAIAsMXHiRDO5Wr9+fZKrAADgkUSCFQAAAECG6927t44cOZLstdjYWE2aNEmDBw82nxs0aJC9QgMAAMhQlAgAAAAAkOEcHR0VExOjqlWrqm7duuYu7CEhIVq9erW56ZUkderUSfPnz8+qUAEAAB4KCVYAAAAAGS4+wZqawMBATZgwQc7OznaICgAAIOORYAUAAACQ4Xbv3q0lS5Zo27ZtOn/+vK5evarbt28rd+7cKlGihBo2bKiePXvq6aefzupQAQAAHgoJVgAAAAAAAACwEZtcAQAAAAAAAICNSLACAAAAAAAAgI1IsAIAAAAAAACAjUiwAgAAAAAAAICNSLACAAAAAAAAgI1IsAIAAAAAAACAjRyzOgAAwL/TvXv39Pfff0uSChQoIEdH/pcEAAAAAMg80dHRunr1qiSpcuXKcnV1zZBx+W0WAJAl/v77b9WqVSurwwAAAAAA/Avt3LlTNWvWzJCxKBEAAAAAAAAAADZiBSsAIEsUKFDAPN65c6eKFCmShdEAAAAAsCY8PFyTJk0yz/v27Ss3N7csjAhIv4sXL5p3Uib8nfRhkWAFAGSJhDVXixQpouLFi2dhNAAAAACsCQsLU548eczzYsWKyd3dPQsjAh5ORu4DQokAAAAAAAAAALARCVYAAAAAAAAAsBEJVgAAAAAAAACwETVYAQAAAAAAYFWuXLn09ttvJzoHEIcEKwAAAAAAAKxycHBgUysgBZQIAAAAAAAAAAAbkWAFAAAAAAAAABuRYAUAAAAAAAAAG1GDFQAAAAAAAFZFRERoxowZ5vnLL7/MRlfA/5BgBQAAAAAAgFWxsbG6dOlSonMAcSgRAAAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANiLBCgAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANiLBCgAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANiLBCgAAAAAAAAA2cszqAAAAAAAA
AJC9ubu7a+jQoVkdBpAtsYIVAAAAAAAAAGxEghUAAAAAAAAAbESCFQAAAAAAAABsRIIVAAAAAAAAAGzEJlcAAAAAAACwKjw8XBMnTjTP+/XrJzc3tyyMCMg+SLACAAAAAADAKsMwFBYWlugcQBxKBAAAAAAAAACAjUiwAgAAAAAAAICNSLACAAAAAAAAgI1IsAIAAAAAAACAjUiwAgAAAAAAAICNSLACAAAAAAAAgI1IsAIAAAAAAACAjUiwAgAAAAAAAICNSLACAAAAAAAAgI0cszoAAAAAAAAAZG8Wi0Xu7u6JzgHEIcEKAAAAAAAAq9zc3PT2229ndRhAtkSJAAAAAAAAAACwEQlWAAAAAAAAALARCVYAAAAAAAAAsBEJVgAAAAAAAACwEZtcAQAAAAAAwKqwsDB9/fXX5vnbb78td3f3LIwIyD5YwQoAAAAAAAAANiLBCgAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANiLBCgAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANiLBCgAAAAAAAAA2IsEKAAAAAAAAADYiwQoAAAAAAAAANnLM6gAAAAAAAACQvTk4OKhw4cKJzgHEIcEKAAAAAAAAq3LlyqW+fftmdRhAtsSfGwAAAAAAAADARiRYAQAAAAAAAMBGJFgBAAAAAAAAwEbUYAUAAAAAAIBVsbGxioiIMM9z5crFRlfA/5BgBQAAAAAAgFURERH6+uuvzfO3335b7u7uWRgRkH3wpwYAAAAAAAAAsBEJVgAAAAAAAACwEQlWAAAAAAAAALARCVak28yZM9W0aVMVKFBAjo6OslgsslgsWr9+vSRp2LBh5nNnz57N0lgz0uP6ujLa2bNnlTNnTlksFq1YsSKrw0E6GYahypUry2Kx6KOPPsrqcAAAAAAAyPZIsCJd+vTpo5dffllr1qzRtWvXFBMTk9UhPTYiIyO1e/duTZgwQYGBgapcuXKyCeyMMm3aNHPstDz27duXpnHfeust3bt3T3Xr1lWLFi0yNGbYZv369Ro2bJiGDRuW6h8HLBaLPvnkE0nSmDFjdOrUKTtECAAAACA7i4mJ0aZNm7Rv3z7t3LlT+/bt06ZNm8gJAP/jmNUB4NGxe/du/fTTT5Kk/Pnza9CgQSpbtqycnZ0lSZUqVcrK8B55devW1Z49e7I6jIeyfft2/fbbb5Kk4cOHZ20wMK1fv978PPz8/FSyZEmr7Tt16qQKFSro8OHDGjp0qGbOnGmHKAEAAABkN5cvX9bkyZM1adIkBQcHJ7r222+/ydvbW3379lXv3r1VqFChLIoSyHokWJFmy5cvN4+//fZbdevWLdl28SvlkD4P/uWvRIkSioqK0qVLlzJ97tdff13PPvus1TZPPPFEquMMHTpUklSjRg01a9YsQ2KD/Tk4OOj9999Xjx49NGfOHH3yySfy9fXN6rAAAAAA2NEPP/ygwYMH6/79+ym2CQ4O1scff6wRI0ZozJgxGjBggB0jBLIPEqxIs5CQEPP4qaeeysJIHk/PPvusOnTooBo1aqhmzZoqWLCgevbsqaCgoEyf++mnn5a/v/9DjXHw4EGtXLlSkhQQEJABUSErdezYUQMGDFB4eLjGjRunCRMmZHVIAAAAAOxkyJAhGj16dJLnfX19lTdvXt26dUvHjh0zn79//74GDhyoixcvauTIkfYMFcgWqMGKNEv4VysXF5csjOTxNGbMGA0dOlStW7dWwYIFszqcdItPwDk6Oqa4uhmPDjc3N7Vv316SNGPGDIWHh2dxRAAAAADs4YcffkiUXPXw8NCgQYN05MgRHT16VNu3b9fRo0d15MgRDRo0SB4eHmbbUaNG6YcffsiKsIEsRYL1EXLz5k0FBQWpR48eqlKlivLkySMnJyd5eXmpdu3aGjJkiC5evJihc549e9bc5CjhSsonnngi0QZICUsCDBs2zHz+wQ11pk6dal5r0KCB1YLYixcvNttWrVo1xdsS
9u/fr0GDBqly5cry9PSUq6urSpQooU6dOmnx4sVpfq0LFy5Uy5YtVbBgQeXMmVOlSpXSK6+8or///jvNY/xbRUVF6ZdffpEkNWvWTAUKFEhTv2vXrumzzz6Tn5+fihQpImdnZ7m7u6tixYrq1auXfvvtN0VHR6fY/8qVKxo6dKhq1aql/Pnzy8XFRcWKFVPr1q01ZcoUq32luHqk8d8xSYqOjtaPP/4oPz8/FS5cWDly5JCfn5/ZvmTJkrJYLGYN0/DwcH399deqU6eOChQoIAcHB/Xs2TPJPPfv39ekSZPUunVrFS9eXK6urvL09FT16tX18ccf6/Lly2l6vyRp27ZtGjBggCpVqiRPT085OTmpQIECatCggYYMGaJDhw6ZbeM3MktYD7dx48ZJNjBL+BoTeuGFF8zXuWjRojTHCAAAAODRdPnyZQ0ePNg89/Hx0e7duzVu3DiVK1cuUdty5cpp3Lhx2rVrl3x8fMznBw8enK7fcYDHgoFHwo0bNwxnZ2dDktVHrly5jHnz5mXYvGfOnEl1TknG0KFDzT5Dhw41nz9z5kySMTt37mxeHzFiRLLzXrhwwfDy8jIkGa6ursbBgweTtImMjDQGDBhgODg4WI2tRYsWxu3bt1N8jZGRkUaXLl1S7O/i4mJMnz491deVGXr06GHOuW7dugwde+rUqebYU6dOfaix1q1bZ4713//+N019fvrpJ8Pd3T3V79a0adOS7T9//nwjd+7cVvtWqlTJOH36dIoxNGrUyGx79epVo06dOknGaNSokdnex8fHkGT4+PgYJ0+eNHx9fZO079GjR6I5tm3bZnh7e1uN083NzVi4cKHV9+vOnTtGp06d0vTzGC/hZ2ztkfA1Pjhn/M9Xx44drcZni5CQEDOGkJCQDB8fAAAAQPqMHj3a/De6h4eHcezYsTT1O3r0qOHh4WH2/eyzzzI5UsA2mfV7KDVYHxExMTGKjIxU8eLF1bRpU1WpUkWFChWSxWJRSEiItm7dqiVLligiIkIvvPCCSpQooTp16jz0vAULFtSvv/4qSRo/frzWrVsnSZo0aVKi29gf/EuWNZMmTdK2bdt0/vx5jRgxQs2bN1ft2rXN64ZhqEePHrp+/bok6YsvvlDFihUTjWEYhjp16qQlS5ZIkooXL67u3burcuXKcnFx0alTpzRjxgwdOXJEK1asULt27bRmzRrlyJEjSTz9+/fXvHnzJEk5c+ZUr169VKdOHRmGoU2bNikoKEi9e/d+rDdt+uGHH/Sf//xHwcHBcnBwUMGCBVW7dm117txZ/v7+cnCwvth99erV5nHNmjVTne+///2vPvjgA/Pcz89PrVu3Njf2OnHihNauXastW7bIMIwk/ZcuXaquXbsqNjZWUtyqWX9/f3l5eenUqVOaOnWqTp48qYMHD6pBgwbau3dvqqtqX3rpJW3fvl01a9ZU165dVaJECV27di3ZTcbu37+v559/XseOHdOzzz4rf39/FSpUSBcuXEjUbvPmzWrevLnu3r0rBwcHtWrVSk2bNlXRokUVGhqqDRs2aM6cOQoPD1fnzp21fPnyZL9nd+/elZ+fn/bs2SMp7vb9bt26qU6dOsqTJ49u3rypffv26ffff09UK/nZZ5/Vr7/+qrlz55orjEeOHKlKlSolGj9//vzJviceHh4qX768Dh06pDVr1sgwDHO1LwAAAIDHS0xMjCZOnGieBwYGqmzZsmnq6+vrq169emn8+PGSpIkTJ+q9995L9ndw4HFEgvUR4ebmptWrV6tJkyYptjlw4IBatGihS5cu6b333tPGjRsfet5cuXKZmx/99ttv5vPNmzc3b5NOL09PT82YMUNNmjRRdHS0XnzxRe3bt0/u7u6SpLFjx2rVqlWSpOeee06vv/56kjHGjRtnJlcDAwP1/fffy9XVNVGbd999V3369NHUqVO1YcMGTZo0KcmOhuvWrdPkyZMlxSWZ1q9fnyiZGxAQoFdeeUXNmjXTsmXLbHq9
j4Jdu3YlOj9z5ozOnDmjuXPn6umnn9a8efNUunTpFPvv3LlTUtzu8zVq1LA615YtW/TRRx9Jivtez549W+3atUvSbvjw4Tpx4kSS0hC3b99W7969FRsbK4vFoh9++EH9+vVL1Gbw4MHq1q2bFi9erH/++UevvfaamWBMyYoVK/Thhx9q9OjRqSYRL126pEuXLmnChAlJ5o4XGhqq7t276+7du/Ly8tLSpUtVt27dRG0CAwP1xhtvqGnTprp165Z69eql06dPy9nZOVG7t956y0yu1qpVS7/99puKFCmSZM7vv/8+UVkMb29veXt7a9++feZzzzzzTIolAZJTu3ZtHTp0yCxin54/ppw/f97q9YwuaQIAAICMERYWprCwsKwOA3a2devWRAs2+vfvn67+/fv3NxOswcHBWrx4serVq5ehMSL7c3d3N/M7/yoZthYW2cKUKVPMpc7nzp3L0LET3q5u7Rb5tN5K/8EHH5jtevbsaRiGYezbt89wcXExJBkFCxY0Ll26lKTf3bt3jQIFChiSjHr16hmxsbEpzhEZGWmULl3akGQ8+eSTSa63adPGjGH+/PkpjjNhwoREt1Q/LiUCHB0djUaNGhmffPKJERQUZMyfP9/44YcfjJdeeslwdXU15y5YsKDV71ORIkUMSUaxYsVSnbdp06bmuDNnzkx33GPHjjX79+rVK8V2oaGhRrFixQxJhoODQ7KlAhKWCKhTp47V75Jh/H+JAElGly5drLb96quvzLbLly+32jbhz+2D78mZM2cMR0dHQ5JRuHBh49q1a1bHSk7Cn8n0fo+++OKLNP2MJCfhz0xqD0oEAAAAZB8J//3I49/58PX1tem7k1wZNR7/rkfCEpLZUWaVCGCTq8dMwr8Oxa8qzK5GjBhhrnacNm2aZsyYoRdeeMFcsThlyhQVKlQoSb/ly5fr6tWrkuJWKlpbbejk5KSuXbtKkk6ePJlo06179+5pxYoVkuJW+nXs2DHFcQIDA+Xp6Zm+F5jNPfPMMzp37pzWr1+vESNGKCAgQJ06dVL//v01Y8YMHT9+3Lzd/8qVK+rRo0ey40RGRpq30efLl8/qnFeuXDHLCVSoUEEvvvhiuuOOL1khSe+9916K7dzd3c0Vy7GxseaK55T0798/Xbe/Dxw40Or1GTNmSJIqVaqkFi1aWG3btWtXOTrG3VAQv3o73rx588zNut566y15eXmlOcaMkPB7f+7cObvODQAAACBr5M2b16Z+efLkydhAgEcEJQIeMefOndO0adO0fv16HT16VLdu3dK9e/eSbfvPP//YObr0cXJy0uzZs/XUU08pPDxcAQEB5rUBAwaodevWyfbbtGmTeXz9+vVEpQuSc+PGDfP4yJEjZmmD/fv3KyoqStL/7yafEmdnZ9WvX1+///57ai/rkfHkk09avV6iRAn9+eefqlSpki5duqT169dr27ZtSW5zv3XrllknNbUk9ObNm83jNm3apDtmwzC0e/duSXF1d1O7Xb1Zs2b6+OOPJUk7duyw2jY9t67kyJFDtWrVSvH67du39ffff0uSChQokOp3VIpLCN+6dUtHjhxJ9HzC96xt27ZpjjGjJEya37x5M119E95elJyLFy9afR8BAAAAZI1bt27Z1O/27dsZGwjwiCDB+ggZN26cPvjggyQ1KVNy586dTI7o4ZUpU0bffPONXnnlFfO5ChUq6KuvvkqxT8JVdH379k3XfAkTRAk3JEot2ZjWNo8bLy8vDRo0SB9++KEkadmyZUkSrAm/jx4eHlbHS5j0L1++fLrjuX37tiIiIiTFfXdSk7Age3KbVSVUrFixNMfh5eWVpOZvQiEhIeYGXOvWrTM3h0uLB5OY8e+ZxWKRr69vmsfJKLlz5zaP7969m66+xYsXz+hwAAAAYAfvvPNOinsN4PG1detW887OY8eO6ejRo+nag+HIkSM6duyYeb5w4UJqsP4L/Svrr4oE6yNj1qxZeuuttyTFbSTUpEkT1atX
T97e3nJ3dzc3xbly5YqZdIyJicmyeNPjwURZq1atlDNnzhTbP8xfxCIjI83j8PBw8zhXrlyp9nVzc7N53kdZwg2REv7PMp6Li4t5nFpSP+F1W/6jm7DQflo+j4RtQkNDrba19p1Lb9uM+o5K//+e5cqVSw4O9q/qkvC1pOc9AgAAwKPrX7tJzb9c+/bt5e3treDgYEnSxIkTNW7cuDT3nzhxonns7e2t9u3bK0eOHBkdJpAtkWB9RHzyySeS4lYIrlmzxqyN+aBDhw7ZM6yHdvv27USlAaS4lbqdOnVS7dq1k+2T8H/0Fy5cSHY39bRImHyLXxVpTcKE7L9Jwpqfyd0mkjdvXlksFhmGkagcQ3ISrnC1ZVfShJ99Wj6PhG1SW12bkRLG2adPH02aNMnmseLjjoiIUGxsrN2TrAk/09Rq7AIAAAB4dOXIkUN9+/Y1y6xNmTJF/fv3T9OddMeOHdPUqVPN8379+pFcxb8Km1w9Ak6fPq0zZ85IikvWpJRclZRoE6dHQf/+/c1b/v39/WWxWBQdHa2XXnopxQRcwlu5z58/b/PcRYsWNY9PnjyZavu0tHkcXb9+3TxOrtC5s7Oz+V6mlmBNeMv4g7VG0yJPnjzmauMTJ06k2j5hG1sT8bZI+N16mO+o9P/vmWEYya4gzmwJSxb4+PjYfX4AAAAA9tO7d2/zLsXQ0FC1aNEi1d9Djh07phYtWph3Dbq4uKh3796ZHiuQnZBgfQRcvnzZPE6tDujy5cszO5wMM2PGDM2ZM0eSVLduXS1YsEBvv/22pLhk5htvvJFsv4YNG5rHK1eutHn+qlWrysnJSZK0fv16c6Om5ERGRmrLli02z/Uo27Bhg3mcUt3TSpUqSYqrc2rt9vj69eubx7ZsGGaxWFSjRg1JcYnLo0ePWm2/atUq89iemykVKFDArDG7efPmNK2QTskzzzxjHi9dutSmMRKuerX2PU9Owvc4/nMGAAAA8HgqVKiQxowZY56fO3dONWvW1KBBg5L8/nX06FENGjRINWvWTLRXypgxY1SwYEG7xQxkByRYHwEJ64NaW0UZHBysadOm2SGih3f27Fm99tprkuJugZ45c6Zy5Mih0aNHq1q1apKkqVOnauHChUn6tmrVyrxt/dtvv9XVq1dtisHV1VUtW7aUFPfe/frrrym2nTZtWrp3UH8c3LhxQ99884153qpVq2TbxZdzMAxDu3btSnG8QoUKqWnTppKkw4cPa9asWemO6fnnnzePv/zyyxTbhYeH64cffpAUl2Bs165duud6GPGlL+7cuWN107bUdO3aVY6OcdVcxo4dm2hFcVqlt7RCQjt27JAkeXp6Jto0DAAAAMDjacCAARoyZIh5HhoaqvHjx6t8+fIqV66cateurXLlyql8+fIaP358ov0uhgwZogEDBmRF2ECWIsH6CChfvrxZL/Tnn3/WqVOnkrS5cOGC2rVrZ1NdS3uLiYnRSy+9ZG7e8+2336pUqVKS4m43nz17trmZTp8+fRLtPC/FJYs+/fRTSXGre1u2bGm1NIJhGFqzZo1Gjx6d5Fr8xmFSXLmC5G5b37Vrl9599930vcgsZrFYzEdy7822bdv0888/6/79+ymOcf78eT333HO6ePGipLiVwwlXoCbUrFkz83jnzp1WYxs6dKi5orJv375WV2WeOnUqSV3hXr16qUCBApLikvA//fRTkn73799XQECAeXt+586d9cQTT1iNK6O99tprKlGihCRp+PDh+uabb6yuHr169apGjRqlAwcOJHrex8dHvXr1khS3Qrh169bmZ/IgwzC0ZMmSJM8nfO179uxJ82sIDQ01/0rdtGlTWSyWNPcFAAAA8OgaOXKkvv/++0SbGktx5QB27tyZpGyAi4uLvv/+e40cOdKeYQLZBptcPQKcnZ3Vt29fjRkzRrdv31a1atXUp08fVa1aVRaLRbt27VJQUJDu3LmjgIAATZ8+PatDtmr06NHm7fZdunRRjx49El0v
X768vvrqKw0cOFA3btxQjx49tGrVqkTJnTfeeEM7d+7UrFmztGfPHvn6+srf318NGjRQoUKFFBUVpcuXL2v//v1atWqVLly4oCZNmpjFuuM1btxYvXv31uTJk3XlyhVVr15dgYGBqlOnjgzD0KZNmxQUFCTDMNSqVSstW7Ys096XvXv3Jlmxu3fvXvN48uTJWr16daLr77zzTrJ1UVNz+fJlvfrqq3r77bfVokULVa9eXcWKFZOrq6uuX7+uLVu2aMGCBbp7964kqXDhwgoKCkpxvLp166pQoUK6fPmy1q9fr48++ijFts8884xGjBihIUOGKDw8XO3atVPjxo3VunVrlShRQlFRUTp16pTWrVunjRs3avLkyapYsaLZP3fu3Jo8ebL8/f0VGxurPn36aMGCBerQoYPy5cunU6dOaerUqWb91WLFiunbb79N93v0sNzd3fXbb7/Jz89PoaGhevPNNzVhwgQ9//zzKleunHLlyqU7d+7oxIkT2r59uzZt2qSYmBj5+fklGWvcuHHauXOn9u/frx07dqhMmTLq1q2b6tSpo7x58+rWrVs6cOCAli5dqrNnzyZJ5DZo0EBOTk6KiorSl19+KQcHB1WpUkXOzs6S4javSq6EwqZNmxQbGyspbkdRAAAAAP8eAwYMUMeOHTVlyhRNnDhRwcHBSdp4e3urX79+6t27N2UB8O9m4JFw9+5do3HjxoakFB99+/Y1Tp06ZZ4PHTo0Q2Po0aOHOfaZM2dSbDd06NAU223bts1wdHQ0JBnFixc3bty4keI4rVu3Nsf56quvklyPiYkxhg4dajg7O1t9X+IfAQEByc4TGRlpdO7cOcV+Li4uxowZM6y+rowwderUNL2OhI+U4kitza+//prmOerWrWucPn061fgHDRpkSDJy5MhhXLp0KdX23333nZEzZ85U5w8KCkq2//z58w0PDw+rfStWrGg19kaNGplt08LHx8eQZPj4+KSpvWEYxuHDh43KlSun6b12d3c3Dhw4kOw4t27dMtq2bZvqGBaLJdn+H374YYp9GjVqlGyfF154wZBkuLm5GWFhYWl+zWkVEhJixhASEpLh4wMAAADIGNHR0cayZcsMf39/o1WrVoa/v7+xbNkyIzo6OqtDA9Ils34PpUTAI8LV1VUrV67UDz/8oLp168rDw0MuLi7y8fFRly5dtGLFCk2cODHRZjbZTVhYmF566SVFR0fLwcFBM2bMkKenZ4rtp0yZokKFCkmSPvroI+3fvz/RdQcHBw0bNkynTp3Sp59+qvr166tgwYJycnJSrly55OPjo5YtW2rUqFHav39/iiswnZycNG/ePC1YsEDNmzeXl5eXXFxc9MQTTygwMFC7du3SSy+9lHFvRDbQtGlT/fbbb3r//ffVuHFjlSlTRp6ennJ0dJSnp6cqVaqkV155RatWrdLWrVvTdHt93759ZbFYFBMTY25eZs3AgQN16tQpDR06VHXq1FH+/Pnl6OgoDw8Pc/4//vgjxfe+U6dOOnnypD755BPVqFFDnp6ecnJyUpEiRdSyZUtNnjxZ+/bts3tpgAeVL19e+/fv16JFi/TSSy+pdOnScnd3l6Ojo/Lly6fq1aurd+/emjt3ri5duqTKlSsnO06ePHm0ZMkSrVu3ToGBgSpTpow5TsGCBdWwYUMNHTo0xR0+P/vsM82ZM0ctW7ZU4cKFzdWrKQkPD9fixYslxdWTjS9TAgAAAODfJ0eOHGrQoIGqVaumWrVqqVq1amrQoIFy5MiR1aEB2YLFMNK5pTQApKB169ZatmyZnnrqqXTV+kT2M2PGDAUEBMjBwUGHDx+Wr69vhs9x/vx5s05tSEiIihcvnuFzAAAAAMgYYWFh+vrrr83zt99+O9GGusCjILN+D82+yx0BPHKGDh0qKa527IoVK7I4GtjKMAz997//lSR17949U5KrAAAAAB4tjo6Oeuqpp8yHoyPb+gDx+GkAkGFq1aql559/XosWLdLw4cPV
okWLrA4JNliwYIEOHTokFxcXDR8+PKvDAQAAAJANuLq6ql27dlkdBpAtsYIVQIb6+uuv5erqqm3btrGK9RFkGIZGjhwpSRo8eLBKly6dxREBAAAAAJC9sYL1X+C3336zuW+5cuVUrly5jAvmMRMcHPxQtUabN2+uXLlyZWBEWa9kyZK6e/duVocBG1ksFh04cCCrwwAAAAAA4JFBgvVfoEOHDjb3HTp0qIYNG5ZxwTxm1q5dq169etnc/8yZMypZsmTGBQQAAAAAAAC7IsEKAAAAAAAAq6Kjo3XmzBnz/IknnmCjK+B/+En4FzAMI6tDeGz17NlTPXv2zOowAAAAAADIVPfu3dPs2bPN87ffflvu7u5ZGBGQfbDJFQAAAAAAAADYiAQrAAAAAAAAANiIBCsAAAAAAAAA2IgEKwAAAAAAAADYiAQrAAAAAAAAANiIBCsAAAAAAAAA2IgEKwAAAAAAAADYiAQrAAAAAAAAANiIBCsAAAAAAAAA2IgEKwAAAAAAAADYyDGrAwAAAAAAAED25uTkpAYNGiQ6BxCHBCsAAAAAAACscnFx0bPPPpvVYQDZEiUCAAAAAAAAAMBGJFgBAAAAAAAAwEYkWAEAAAAAAADARtRgBQAAAAAAgFVRUVE6ePCgeV6pUiU2ugL+hwQrAAAAAAAArLp//76WLFlinpcpU4YEK/A/lAgAAAAAAAAAABtlqxWsUVFR+vPPP3Xs2DHlzJlTDRs2VJUqVbI6LAAAAAAAAABIll0SrLdv39bo0aMlSX5+fmrVqlWSNkePHlXbtm11+vTpRM9369ZN06ZNY9k5AAAAAAAAgGzHLgnW1atX66uvvpLFYlHHjh2TXI+KilL79u116tSpJNfmzp0rFxcXTZkyxR6hAgAAAAAAAECa2aUG66pVqyRJRYoUUe3atZNcDwoK0okTJ2SxWJQvXz7169dP/fr1k7u7uwzDUFBQkPbs2WOPUAEAAAAAAAAgzeyygnXXrl2yWCyqW7dustenT58uSXJxcdG2bdtUpkwZSdILL7yghg0bmm2efvppe4QLAAAAAAAAAGlilxWs586dkySVLVs2ybWwsDBt27bNLB8Qn1yVpGeeeUZ16tSRYRjaunWrPUIFAAAAAAAAgDSzS4I1NDRUkpQnT54k17Zv366YmBhJUtu2bZNcjy8p8ODmVwAAAAAAAACQ1eySYLVYLJLiNrN60Pbt283j+HIACRUsWFDS/ydpAQAAAAAAACC7sEuCNV++fJL+v1RAQuvWrZMklSpVSoULF05yPTIyUpLk6GiXcrEAAAAAAAAAkGZ2yVpWqFBBly5d0ooVKxQbGysHh7i87sWLF7Vp0yZZLJZkV69K0j///CNJyp8/vz1CBQAAAAAAwAOcnZ313HPPJToHEMcuK1hbtWolSTp//rwGDhyomzdv6p9//lGvXr0UHR0tSWrXrl2yfffu3SuLxaLSpUvbI1QAAAAAAAA8wNnZWbVq1TIfJFiB/2eXBGtgYKC5AvXHH39U/vz55e3trVWrVpnJ0+Q2uLp8+bL27t0rSapevbo9QgUAAAAAAACANLNLgjVv3ryaP3++3N3dZRhGooeHh4dmzpxplg1IaM6cOYqNjZUk+fn52SNUAAAAAAAAAEgzu+0c1ahRIx0+fFg//vij9u7dq5iYGFWtWlX9+vWTt7d3sn2WLl0qHx8fOTs7q0mTJvYKFQAAAAAAAADSxG4JVkkqVqyYhg8fnub2a9asycRoAAAAAAAAkBaRkZHasWOHeV67dm3qsAL/Y9cEKwAAAAAAAB49kZGRWrt2rXn+1FNPkWAF/scuNVgBAAAAAAAA4HFEghUAAAAAAAAAbJShJQKmT5+ekcMlERAQkKnjAwAAAAAAAEB6ZGiCtWfPnrJYLBk5pMlisZBgBQAAAAAAAJCtZPgmV4ZhZPSQAAAAAAAAAJAtZWiCdejQoRk5HAAAAAAAAABkayRYAQAAAAAAAMBGDlkdAAAAAAAAAAA8
qkiwAgAAAAAAAICNSLACAAAAAAAAgI0ytAZrWgUHB+uPP/7Q7t27dfXqVYWHh6tfv37q3LlzonY3btyQJLm4uMjNzS0rQgUAAAAAAPjXc3V1TZS3cXV1zcJogOzFrgnWO3fu6I033tDs2bMVExMjSTIMQxaLRa1bt07SvmnTptq/f7/Kli2rI0eO2DNUAAAAAAAA/I+jo6MqVKiQ1WEA2ZLdSgRcunRJ1atX14wZMxQdHS3DMGQYhtU+b775pgzD0PHjx7Vr1y47RQoAAAAAAAAAaWO3BGuHDh106tQpGYahWrVqadasWTp48GCqfZydnSVJf/75pz3CBAAAAAAAAIA0s0uJgAULFmjHjh2yWCzq3r27pk+fLgeH1HO7Hh4eqlmzprZu3apt27bZIVIAAAAAAAAASDu7JFh/+eUXSVL+/Pn1008/pSm5Gq9q1arasmWLjh07llnhAQAAAAAAwIr79+9rzZo15nmTJk3k4uKShREB2YddEqw7d+6UxWJRu3btlDNnznT1LViwoCTp6tWrmREaAAAAAAAAUhEVFZVof5yGDRuSYAX+xy41WK9cuSJJKlWqVLr7xtdgjYyMzNCYAAAAAAAAAOBh2SXBGv8Xjejo6HT3vXbtmiTJ09MzQ2MCAAAAAAAAgIdllwRroUKFJEknTpxId98dO3ZIkry9vTM0JgAAAAAAAAB4WHZJsNavX1+GYWj58uXputX/6NGj2rp1qywWixo2bJiJEQIAAAAAAABA+tklwdqpUydJ0vXr1zV8+PA09bl//7569eolwzAkSS+88EKmxQcAAAAAAAAAtrBLgrVVq1aqU6eODMPQf/7zH73//vuKiIhIsf2WLVtUv3597dixQxaLRa1bt9bTTz9tj1ABAAAAAAAAIM3skmCVpDlz5qhgwYIyDENfffWVChcurJYtW5rXZ86cqTZt2sjb21sNGzbU3r17JUnFihXTlClT7BUmAAAAAAAAAKSZ3RKsPj4+2rhxoypXrizDMBQWFqZVq1bJYrFIkvbv368///xT//zzjwzDkGEYqlSpkjZu3Kj8+fPbK0wAAAAAAAAASDO7JVglqWzZstq1a5cmTJigKlWqSJKZTE34KFu2rL755hvt2rVLJUuWtGeIAAAAAAAAAJBmjvae0NnZWX379lXfvn11/fp1HThwQNevX1d0dLS8vLxUrlw5lShRwt5hAQAAAAAAAEC62T3BmpCXl5caN26clSEAAAAAAAAgFTlz5lRgYGCicwBxsjTBCgAAAAAAgOwvR44c3HEMpMCuNVgBAAAAAAAA4HFCghUAAAAAAAAAbJShJQJy5MiRkcMlYrFYFB0dnWnjAwAAAAAAAEB6ZWiC1TCMjBwOAAAAAAAA2cDdu3f1+++/m+dt2rRhoyvgfzI0wert7S2LxWK1TXBwsKT/T8a6uLjI09NTknTz5k3dv39fUtyKVYvFQgFlAAAAAACALBYTE6PDhw+b588991wWRgNkLxmaYD179myK10JDQ9W7d2+dO3dOnp6eeuutt9SxY0eVK1fOTMoahqGjR49q4cKFGjdunG7evKlatWrp559/loeHR0aGCgAAAAAAAAAPLUMTrNZ069ZNy5cvV82aNfX777+rQIECSdpYLBaVL19eQ4YMUb9+/dSmTRstWLBAd+7c0Z9//mmvUAEAAAAAAAAgTRzsMcmcOXP0559/ysPDQ4sXL042ufqg/Pnz69dff5W7u7tWrlyp2bNn2yFSAAAAAAAAAEg7uyRYg4KCZLFY1L59exUuXDjN/YoUKaIOHTrIMAwFBQVlYoQAAAAAAAAAkH52SbAePHhQkuTr65vuvmXLlpUkHTp0KENjAgAAAAAAAICHZZcE6/Xr1yVJYWFh6e4b3yd+DAAAAAAAAADILuySYM2fP78kadWqVenuG98nfgwAAAAAAAAAyC7skmCtV6+eDMPQnj17NHny5DT3mzJliv766y9ZLBbVrVs3EyMEAAAAAAAAgPSzS4K1
T58+5nG/fv00bNgw3b17N8X29+7d0/Dhw9W3b1/zuYTHAAAAAAAAAJAdONpjkiZNmigwMFBTpkxRbGysRo4cqbFjx6p58+aqVq2avLy8JMXVWd2/f79Wrlyp0NBQGYYhSerZs6eaNGlij1ABAAAAAAAAIM0sRnwWM5PFxsaqT58+mjJlyv9PbrEk2zZhSD179tRPP/2kHDlyZHqMAAD7OX/+vEqUKCFJCgkJUfHixbM4IgAAAAApiY2N1e3bt83zPHnyyMHBLjdGAxkms34PtdtPgoODg37++WctWbJENWvWlBSXSE3uIUk1a9bU4sWLNWXKFJKrAAAAAAAAWcjBwUGenp7mg+Qq8P/sUiIgoTZt2qhNmzY6ceKEtmzZouPHj+vmzZuSJE9PT5UtW1b16tVT2bJl7R0aAAAAAAAAAKSL3ROs8cqUKaMyZcpk1fQAAAAAAAAA8NBYzw0AAAAAAAAANsqyFawAAAAAAAB4NERERGju3Lnmebdu3ZQrV64sjAjIPrIkwXrnzh0tWLBAmzdv1okTJ3Tjxg1JUr58+VS2bFnVr19fnTp1Uu7cubMiPAAAAAAAACQQGxurkJCQROcA4tg1wRoTE6MRI0Zo7NixCg8PT7bN1q1bNW3aNL355psaPHiwPvnkE+XIkcOeYQIAAAAAAABAmtgtwXr37l0999xz2rRpkwzDSLV9WFiYRo4cqQ0bNujPP/+Uq6urHaIEAAAAAAAAgLSzW4I1MDBQGzdulMVikSTVrl1bXbp00dNPP638+fNLkq5du6a9e/dq/vz52rZtmwzD0MaNGxUYGKjZs2fbK1QAAAAAAAAASBO7JFi3bNmiX375RRaLRW5ubgoKClKHDh2SbduoUSO9+eabWrx4sQICAhQaGqpffvlFr732murVq2ePcAEAAAAAAAAgTRzsMcn06dPN40WLFqWYXE2offv2WrhwoXkeFBSUKbEBAAAAAAAAgK3skmCNLw3g5+enpk2bprlf06ZN9eyzz5qlAgAAAAAAAAAgO7FLgvXChQuSpPr166e7b3xZgPgxAAAAAAAAACC7sEuCNSoqSpLk7Oyc7r7xfeLHAAAAAAAAAIDswi4J1oIFC0qSDh48mO6+8X3ixwAAAAAAAACA7MIuCdYaNWrIMAwtXbpU586dS3O/c+fOaenSpbJYLKpRo0YmRggAAAAAAAAA6edoj0k6dOigRYsW6d69e2rXrp2WL1+uIkWKWO1z6dIl+fv76+7du7JYLOrYsaM9QgUAAAAAAMAD3Nzc9NFHH5nnjo52SSkBjwS7rGB94YUXVKlSJUlxt/yXL19eQ4cO1YEDBxQbG2u2i42N1d9//62hQ4eqQoUKOnDggCwWiypXrqxu3brZI1QAAAAAAAA8wGKxyMnJyXxYLJasDgnINiyGYRj2mOjkyZOqV6+erl+/LsMwzB9EJycn5cmTR5J0+/ZtczOr+LAKFiyoLVu2qHTp0vYIEwBgJ+fPn1eJEiUkSSEhISpevHgWRwQAAAAAeJxl1u+hdlnBKklPPvmktm/frtq1a0uKS6AahqHIyEhdu3ZN165dU2RkpPm8JNWrV0/bt28nuQoAAAAAAAAgW7JbglWSSpUqpa1bt2r58uXq3r27vL29JSlRUtXb21vdu3fX8uXLtXnzZpUsWdKeIQIAAAAAAABAmmVJReLmzZurefPmkqSoqCjdvHlTkuTp6SknJ6esCAkAAAAAAAApCA8P1+TJk83z3r17y83NLQsjArKPLN/yzcnJSQULFszqMAAAAAAAAJACwzDMBXLx5wDi2LVEAAAAAAAAAAA8TkiwAgAAAAAAAICNsqREQFhYmP7++29dunRJ4eHhio2NTVO/gICATI4MAAAAAAAAANLOrgnWzZs3a9SoUVqzZk2ak6rxLBYLCVYAAAAAAAAA2YrdEqyff/65hgwZIolCyAAAAAAAAAAeD3ZJsK5atUoff/yxeZ4nTx41btxYpUqVkru7uywWiz3CAAAA
AAAAAIAMZZcE69ixYyXF3ebfv39/ffnll8qZM6c9pgYAAAAAAACATGOXBOuuXbtksVhUvXp1fffdd/aYEgAAAAAAAAAynYM9JomIiJAkPffcc/aYDgAAAAAAAADswi4J1hIlSkiSnJ2d7TEdAAAAAAAAMpiLi4v5APD/7FIioF69ejpx4oQOHTpkj+kAAAAAAACQgdzd3fXBBx9kdRhAtmSXFayDBg2SJC1ZskQXLlywx5QAAAAAAAAAkOnskmCtWrWqPvzwQ0VERKh9+/a6cuWKPaYFAAAAAAAAgExllxIBkjRq1CjlzJlTw4YNU/ny5fXqq6+qWbNmKlGihFxdXdM0hre3dyZHCQAAAAAAAABpZzEMw7DXZHfu3FHfvn31yy+/yGKxpKuvxWJRdHR0JkUGALC38+fPm5sghoSEqHjx4lkcEQAAAADgcZZZv4fabQXr1q1b1a5dO928edNMrtoxtwsAAAAAAAAbhYWFafz48eb5G2+8IXd39yyMCMg+7JJgPXv2rFq2bKmwsDDzOU9PTz3xxBNyd3dP92pWAAAAAAAA2FdUVFRWhwBkS3ZJsH7++ecKCwuTxWJRrVq1NHbsWNWpU8ceUwMAAAAAAABAprFLgnX9+vWSpBIlSmjt2rXKmTOnPaYFAAAAAAAAgEzlYI9JQkJCZLFY1LFjR5KrAAAAAAAAAB4bdkmw5suXT5JUoEABe0wHAAAAAAAAAHZhlwRr5cqVJUnnzp2zx3SAZs6cqaZNm6pAgQJydHSUxWKRxWIxy1UMGzbMfO7s2bNZGmtGelRfV79+/WSxWFS3bt2sDuVfzzAMVa5cWRaLRR999FFWhwMAAAAAQLZnlwRrjx49ZBiGlixZort379pjSvyL9enTRy+//LLWrFmja9euKSYmJqtDemxERkZq9+7dmjBhggIDA1W5cuVkE9jpsXfvXv3000+SpOHDh2dwxEgvi8WiTz75RJI0ZswYnTp1KosjAgAAsI+YmBitX79eQUFB+v777xUUFKT169fz+wQAIFV22eSqW7dumjVrlv744w/169dP06ZNk8ViscfU+JfZvXu3mazLnz+/Bg0apLJly8rZ2VmSVKlSpawM75FXt25d7dmzJ0PH/PDDDxUbG6t69eqpefPmGTo2bNOpUydVqFBBhw8f1tChQzVz5sysDgkAACDTXL58WZMnT9akSZMUHByc5Lq3t7f69u2r3r17q1ChQlkQIQAgu7PLClZJmjt3rrp06aKZM2fqmWee0Z9//ql79+7Za3r8Syxfvtw8/vbbbzVkyBB16dJF/v7+8vf3V/78+SXF3UpvGIYMw1DJkiWzKNpHz4N/vS9RooQKFy5s83jbtm3TihUrJInb0bMRBwcHvf/++5KkOXPm6NixY1kcEQAAQOb44Ycf5OPjo48//jjZ5KokBQcH6+OPP5aPj49++OEHO0cIAHgU2GUFa6lSpcxjwzC0fft2tWnTRjly5FD+/Pnl6uqa6hgWi4VbVZGqkJAQ8/ipp57KwkgeT88++6w6dOigGjVqqGbNmipYsKB69uypoKAgm8b7+uuvJUkFCxZUixYtMjJUPKSOHTtqwIABCg8P17hx4zRhwoSsDgkAACBDDRkyRKNHj07yvK+vr/Lmzatbt24l+kPz/fv3NXDgQF28eFEjR460Z6gAgGzOLgnWs2fPmiUBEpYGiI6O1uXLl1PtbxgGJQWQJvfv3zePXVxcsjCSx9OYMWMybKxLly7pt99+kxRXRsTR0S7/OUIaubm5qX379po9e7ZmzJihr776Sm5ublkdFgAAQIb44YcfEiVXPTw8FBgYqH79+qlcuXLm80ePHtXEiRM1ZcoUhYaGSpJGjRqlIkWKaMCAAXaPG8hKFotFXl5eic4BxLFbiYD427ETPlJ6Prl2eDTcvHlTQUFB6tGjh6pUqaI8efLIyclJXl5eql27toYMGaKLFy9m6JzxCXyLxZJoJeUTTzxhPm+xWDRs2DDz2rBhw8zn
z549m2i8qVOnmtcaNGhgtaj94sWLzbZVq1ZNlOBNaP/+/Ro0aJAqV64sT09Pubq6qkSJEurUqZMWL16c5te6cOFCtWzZUgULFlTOnDlVqlQpvfLKK/r777/TPEZ2MWfOHPO9ffHFF5Nts379evP9/e6775Jtc/To0USfc0obbc2dO9dss2DBgkTXEn4f4vsvX75cnTp1ko+Pj1xcXJL9x0NkZKQmTpyo5s2bq0iRInJxcVGBAgVUr149jR49Wjdv3rT6HiQ379atW9WtWzd5e3vLxcVFhQsX1vPPP68tW7ZYHSverVu39Omnn6pKlSpyd3eXp6enatSooS+//FLh4eGSpJIlS8piscjPz8/qWC+88IIkKTw8XIsWLUrT/AAAANnd5cuXNXjwYPPcx8dHu3fv1rhx4xIlVyWpXLlyGjdunHbt2iUfHx/z+cGDB6dpsRDwOHFzc9Nrr71mPliAASRgABnkxo0bhrOzsyHJ6iNXrlzGvHnzMmzeM2fOpDqnJGPo0KFmn6FDh5rPnzlzJsmYnTt3Nq+PGDEi2XkvXLhgeHl5GZIMV1dX4+DBg0naREZGGgMGDDAcHBysxtaiRQvj9u3bKb7GyMhIo0uXLin2d3FxMaZPn57q68oMPXr0MOdct25dmvs1atTIkGS4u7sbMTExyba5e/eu4eLiYkgyOnbsmGybH374IcXPOaE+ffoYkgyLxWJcuXIl0bWE79vatWvNtg8+Ejp8+LBRunRpq59rvnz5jOXLl6f4HiScd926dcaIESNS/K5YLBZj4sSJVt5Rw9i3b59RpEiRFOOpWLGiERwcbPj4+BiSjEaNGlkd786dO2Y8Kb3/DyMkJMSMLSQkJMPHBwAASM7o0aPNf4N4eHgYx44dS1O/o0ePGh4eHmbfzz77LJMjBQBktMz6PZR7cpFhYmJiFBkZqeLFi6tp06aqUqWKChUqJIvFopCQEG3dulVLlixRRESEXnjhBZUoUUJ16tR56HkLFiyoX3/9VZI0fvx4rVu3TpI0adIkFSxY0Gz34F+jrZk0aZK2bdum8+fPa8SIEWrevLlq165tXjcMQz169ND169clSV988YUqVqyYaAzDMNSpUyctWbJEklS8eHF1795dlStXlouLi06dOqUZM2boyJEjWrFihdq1a6c1a9YoR44cSeLp37+/5s2bJ0nKmTOnevXqpTp16sgwDG3atElBQUHq3bu3mjVrlubXmJUiIiK0detWSdLTTz8tB4fkF9O7urqqTp062rBhgzZs2JBsuZAHV6ymtII1/vkKFSqoQIECKcb23//+VytWrFDx4sXVs2dPVahQQXfv3tXGjRvNNsHBwWrQoIH5+ZcvX149evRQyZIldfXqVc2fP18bN27UjRs31LZtW61atUqNGjWy9pZo0qRJmjt3rooVK6aePXuqYsWKioyM1PLly/XLL7/IMAy9/vrratCggSpUqJCk/6VLl9SsWTNdvXrVfJ3xMV25ckULFizQhg0b1LVrV0VHR1uNJZ6Hh4fKly+vQ4cOac2aNZRrAQAAj7yYmBhNnDjRPA8MDFTZsmXT1NfX11e9evXS+PHjJUkTJ07Ue++9l+y/3wEA/y4kWJFh3NzctHr1ajVp0iTFNgcOHFCLFi106dIlvffee4mSVrbKlSuX/P39Jcms6SlJzZs3V8mSJW0a09PTUzNmzFCTJk0UHR2tF198Ufv27ZO7u7skaezYsVq1apUk6bnnntPrr7+eZIxx48aZydXAwEB9//33STZ0e/fdd9WnTx9NnTpVGzZs0KRJk5LUclq3bp0mT54sScqfP7/Wr1+fKJkbEBCgV155Rc2aNdOyZctser32tm/fPkVFRUmSatWqZbWtn5+fNmzYoGvXrungwYOqXLlyousbNmyQJNWpU0fbt2/X9u3bde/evUTv9cWLF3X8+HFzPGtWrFihJk2aaPHixYlueQkMDDSPX3nlFTO5GhAQoJ9//llOTk7m9ddee03/+c9/
9OGHHyoqKkoBAQE6fvy41brAc+fOVbNmzfTrr78mmrdHjx6qVauWBg8erKioKI0fPz7RLwXx3n77bTO52r17dwUFBSWJafTo0RoyZIjV1/+g2rVr69ChQ+YmD+n5Q8X58+etXs/ociEAgMdDWFiYwsLCsjoMPKa2bt2aaGPc/v37p6t///79zQRrcHCwFi9erHr16mVojIAkubu7m79/AngEZNhaWCCNpkyZYi7HPnfuXIaOnfB2dWu3yKf1VvoPPvjAbNezZ0/DMOJuw46/bb1gwYLGpUuXkvS7e/euUaBAAUOSUa9ePSM2NjbFOSIjI81bzZ988skk19u0aWPGMH/+/BTHmTBhQqLbwbNziYBJkyaZfX766SerbdetW2e2/eabbxJdO3z4sHlt+fLlKcYxe/Zsq+9hwu+Dh4eHcfny5RTj2bt3r9nW19fXuH//fopt27VrZ7adNm2a1Xm9vLyMGzduJDtOTEyM4e3tbUgynnjiiSTXL1y4YDg6OhqSjOLFixsREREpxvTss8+ac6ZWIsAwDOOLL75I0/cvOQm/j6k9KBEAAIiX8P+PPHhk5sPX19em76ivr2+Wx87j8X+kVPosK8XGxhr37t0zH9Z+zwWyq8wqEWC3Ta6AeAn/wrtz584sjCR1I0aMUI0aNSRJ06ZN04wZM/TCCy+Ym1lNmTJFhQoVStJv+fLl5mrCwYMHW72t2snJSV27dpUknTx5MtGmW/fu3dOKFSskSd7e3urYsWOK4wQGBsrT0zN9LzCLBAcHm8f58uWz2rZOnTrmys8Hb/+PLwfh5eWl5s2by9fXN9HzD7azWCyp3qrfqVOnRKUlHhRfjkKS3nzzTTk7O6fY9v333zePE66uTk5AQECKn5+Dg4MZ95kzZ3T37t1E1//44w/ztv9XX31VOXPmTHGeN954w2ocD0oY07lz59LVFwAAIDvLmzevTf3y5MmTsYEAj4jw8HD95z//MR/xm+gCoEQAMsG5c+c0bdo0rV+/XkePHtWtW7d07969ZNv+888/do4ufZycnDR79mw99dRTCg8PV0BAgHltwIABat26dbL9Nm3aZB5fv3491eTajRs3zOMjR46YpQ32799v3krv5+dnNVHr7Oys+vXr6/fff0/tZWW5hK83taSwtTqs8QnXRo0ayWKxqHHjxjp27FiKdVlTq78qKdVbvBL+USC1mrd169aVu7u7wsLCtGPHDqttU6tHXKxYMfP41q1biZKof/31l3mcWgI5tesPSpgAv3nzZrr6Jrz9LjkXL15MtUQEAABAZrl165ZN/W7fvp2xgQAAHnkkWJGhxo0bpw8++MBc4ZmaO3fuZHJED69MmTL65ptv9Morr5jPVahQQV999VWKfRKu9Ovbt2+65kuYxLpw4YJ5/OSTT6baNy1tsoOE3w8PD49U28fXYb1x44YOHDigqlWrSvr/+qvxdVX9/Pw0ceJE7dixw6zDeuHCBZ04cSJRO2sSJjKTE1831MHBQaVKlbLa1mKx6Mknn9S+fft0+fJlq5tE5c+f3+pYCeu3PvjzlfB7klpMefPmlaenZ5qTpblz5zaPH1w5m5rixYunqz0AAJL0zjvvqF+/flkdBh5TW7duNe8KO3bsmI4ePZquGvNHjhzRsWPHzPOFCxdSgxWZgvqrwKOFBCsyzKxZs/TWW29Jiks+NWnSRPXq1ZO3t7fc3d3NW6mvXLliJh1jYmKyLN70KFOmTKLzVq1aWb0N+2H+qh0ZGWkeJ7zlIleuXKn2Tbg5UnaWMFmYliS7n5+fhg8fLinudv+qVavq8OHDunLliiSpcePGZjspLgG5bds2NW7cONFq1rQkWK19rpLMTTdy5cpldUVxvPjPJDY2VhERESl+Rg4OtldsseV7ktYEa8LvcmrvDQAAGYGNXZCZ2rdvL29vb7Nk1cSJEzVu3Lg090+42ai3t7fat2+vHDlyZHSYAIBHDAlWZJhPPvlEUtyKxDVr1qhmzZrJtjt06JA9
w3pot2/fTlQaQIpbqdupUyfVrl072T4Jfym4cOGCihQpYtPcCZNxERERqbZ/VGrgJLztPGG5gJTE12G9f/++1q9frzfffNOsq1qgQAFVrFhRklSoUCGVK1dOR48e1bp169S4ceN01V9Ni/jPNiIiwuqK1Hjxn4mDg0Oakp+2ePB74uXllaaY0iLh55NavVwAAIDsLkeOHOrbt68+/vhjSXF7KvTv39+s5W/NsWPHNHXqVPO8X79+JFcBAJIkNrlChjh9+rTOnDkjSerTp0+KyVVJiTZxehT079/fvOXf399fFotF0dHReumll8zVjA9KeJv5+fPnbZ67aNGi5vHJkydTbZ+WNtlBfI1ZKW0J1vg6rJK0ceNGxcbGJqm/Gi9+NWv89fTUX02L+GR5bGysTp8+bbWtYRg6deqUJKlw4cJpWvFqi4Tfk9RiunXrVrpqqSZs6+Pjk/7gAAAAspnevXubd1SFhoaqRYsWiW77T86xY8fUokULhYaGSoq7I6t3796ZHisA4NFAghUZ4vLly+ZxanVAly9fntnhZJgZM2Zozpw5kuI2LFqwYIHefvttSXHJzJR2ZG/YsKF5vHLlSpvnr1q1qpycnCTFJQoNw0ixbWRkpLZs2WLzXPZUqVIl8zi1f8zGi7+9/+bNm9q3b59ZfzU+ofpgu507d+rkyZNm0jkt5QHSIuGmTKtWrbLadvv27eY/wjNzM6fq1aubx/HvS0pSu/6go0ePmscJPzcAAIBHVaFChTRmzBjz/Ny5c6pZs6YGDRqU6N8+Uty/hQYNGqSaNWsm2mdhzJgxKliwoN1iBgBkbyRYkSES3vpsbRVlcHCwpk2bZoeIHt7Zs2f12muvSYorezBz5kzlyJFDo0ePVrVq1SRJU6dO1cKFC5P0bdWqlXmb9rfffqurV6/aFIOrq6tatmwpKe69+/XXX1NsO23atHTv8p5VqlWrZq4a2LFjR5r6JEyQfv/99+Z7+mDiNGEd1s8//zzZ/g/j+eefN4+/+eYbRUVFpdj2iy++MI87dOiQIfMnp3Xr1nJ0jKv48vPPP1vdjGr8+PHpGjv+8/H09FTZsmVtDxIAACAbGTBggIYMGWKeh4aGavz48SpfvrzKlSun2rVrq1y5cipfvrzGjx9v/tFckoYMGaIBAwZkRdgAgGyKBCsyRPny5c06kD///LN5W3RCFy5cULt27VK8rT47iYmJ0UsvvWRuwPTtt9+au7M7Oztr9uzZ5oY/ffr00T///JOov7u7uz799FNJcat7W7ZsabU0gmEYWrNmjUaPHp3kWvzGYVJcuYIjR44kabNr1y69++676XuRWcjV1VXPPPOMJGnv3r2Kjo5OtU98HVZJmj59uqS41QcVKlRI1K5gwYLmc/HtMqr+qhS3qrh58+aS4lY0vPrqq8nG/+WXX+q3336TFLcBQteuXTNk/uQUKVJEXbp0kSSFhISod+/eySZ+R48erbVr16Z53NDQUHMVR9OmTTOtxAEAAEBWGDlypL7//vtEG7BKcXdY7dy5M8mdVi4uLvr+++81cuRIe4YJAHgEsMkVMoSzs7P69u2rMWPG6Pbt26pWrZr69OmjqlWrymKxaNeuXQoKCtKdO3cUEBBgJr6yq9GjR5u323fp0kU9evRIdL18+fL66quvNHDgQN24cUM9evTQqlWrEiWg3njjDe3cuVOzZs3Snj175OvrK39/fzVo0ECFChVSVFSULl++rP3792vVqlW6cOGCmjRpYhbcj9e4cWP17t1bkydP1pUrV1S9enUFBgaqTp06MgxDmzZtUlBQkAzDUKtWrbRs2bJMe1/27t2bZMXu3r17zePJkydr9erVia6/8847yps3b5Kx/P39tWbNGkVERGjnzp2qV6+e1bnj67Bu2LDBTGimlDT18/PT4cOHzXYZVX813k8//aSnn35a169fV1BQkHbv3q2AgACVLFlSV69e1fz5881b8Z2cnDR9+vQk/3DPaF9//bVWrVqlq1ev
as6cOdq/f7969OiRJKa6desqODhY//zzjxwcrP+NbdOmTYqNjZUUt+MuAADA42bAgAHq2LGjpkyZookTJyo4ODhJG29vb/Xr10+9e/emLAAAIFkkWJFhRo8erb1792rdunUKCwtLVNcoXt++ffXee+9l6wTr9u3bzb9KFy9eXBMnTky23YABA7Rs2TL98ccfWrNmjcaMGWPWZ403ffp0Pfnkk/r8888VGRmpefPmad68eSnOnXBzrIQmTJigO3fuaP78+bp7966+//57ff/99+Z1FxcXTZ48WSdPnszUBOv+/fuTXWUbb+bMmUmee+WVV5JNsHbt2lVvvfWWoqOjNXv27FQTrFJc4jRhDdEH668mbPfDDz8kOs9I3t7e2rhxo9q1a6dTp07p0KFDev/995O0y5cvn2bNmpVhq2etKVy4sFauXKlWrVrp4sWLOnz4cJKYKlasqLlz55rvtYeHh9UxZ82aJUlyc3OTv79/psQNAACQ1QoVKqQPP/xQ7733njZt2qTg4GCFhobKw8ND3t7eatCggXLkyJHVYQIAsrEMTbAm99e+jOTt7Z2p4+PhuLq6auXKlfrpp580Y8YMHTx4UJGRkSpcuLBq166t3r17q3nz5lZvlc9qYWFheumllxQdHS0HBwfNmDFDnp6eKbafMmWKqlSposuXL+ujjz5S06ZNVbVqVfO6g4ODhg0bpldeeUU//fST1qxZoxMnTujmzZtycnJSgQIFVL58eT3zzDNq27atqlSpkuw8Tk5OmjdvnhYuXKgff/xRf/31l8LCwlS0aFE1btxYb775pipXrqxhw4Zl9FuSaQoUKKCOHTvql19+0S+//KJx48aZdURT4ufnp+HDhyc6T6mdxWIxNwXL6ASrFLcq9tChQ5oyZYoWLVqkAwcO6ObNm/Lw8FCZMmXUpk0bDRw40Or3J6NVq1ZNhw8f1tdff61ff/1VZ86ckZOTk0qXLq2uXbtq4MCBypUrl1mrN1++fCmOFR4ersWLF0uSAgICzBIgAAAAj6scOXJkyr8bgcdFjhw5VLJkyUTnAOJYDGvbkqeTg4NDptXos1gsaarTCODRsWvXLtWqVUuStHjxYrVr1y6LI3r8HTx4UJUrV5YUV1Zg8ODBybabMWOGAgIC5ODgoMOHD8vX1zfDYzl//rxKlCghKa52bPHixTN8DgAAAAAA4mXW76EZvsmVYRiZ9gDweKlZs6ZatWolSfrPf/6TxdH8OyQsLZFS6QLDMPTf//5XktS9e/dMSa4CAAAAAPC4yNASAQ0bNrS6gjU6Olpbt26VFPcLvIODg0qVKqX8+fNLkq5du6bTp08rNjZWFotFFotF9erVY9k58BgbPXq0li9frm3btmnFihVq0aJFVof0yNq2bZtq1KghJyenZK//+OOPmjRpkiTp6aefVvXq1ZNtt2DBAh06dEguLi6JSjIAAAAAAICkMjTBun79+hSvXbhwQZ07d5ZhGHryySf16aefyt/fX+7u7onahYeH69dff9WoUaN0/PhxxcTE6JdfflGRIkUyMlQA2US1atXUp08fTZw4UcOGDSPB+hA+/PBDHT58WK1atVL16tVVuHBhRUdH6/Tp01q8eLF27dolSXJ0dNSECROSHcMwDHOTt8GDB6t06dJ2ix8AAAAAgEdRhtZgTUlUVJSeeeYZ7d69W61atdL8+fPl6upqtc/9+/fVqVMnLVu2TNWrV9eWLVtSXJWFR99vv/1mc99y5cqpXLlyGRfMYyY4OFh79uyxuX/z5s2VK1euDIwImcXPz08bNmyw2iZ37tyaPXu2WrdubaeoUkYNVgAAAODRERsba26YK0menp5ycMjwypNApsqs30MzdAVrSqZOnapdu3apQIECmjVrVqrJVUlycXHRrFmzVKZMGf3111+aPHmy+vXrZ4dokRU6dOhgc9+hQ4dq2LBhGRfMY2bt2rXq1auXzf3PnDmTaKdIZF/ffvut5s6dqy1btigkJETXr19XRESE
8ubNK19fX7Vo0UL9+/eXl5dXVocKAAAA4BETERGh7777zjx/++23k9yVDPxb2SXBOmvWLFksFvn7+yt37txp7pc7d2516NBBP/74o+bMmUOCFQCsqFy5sipXrpzVYQAAAAAA8K9ilwTr8ePHJcmmVXA+Pj6JxsDjyQ6VKv61evbsqZ49e2Z1GAAAAAAAAI8luxTLuHXrliTpxo0b6e4b3yd+DAAAAAAAAADILuySYC1cuLAMw9Dvv/+ern6GYeiPP/6QJBUqVCgzQgMAAAAAAAAAm9klwdq4cWNJcbf5jxgxIs39Ro8eraNHj8pisZhjAAAAAAAAAEB2YZcE68CBA+XgEDfV8OHD1bNnTwUHB6fYPiQkRL169dLQoUPjgnRw0GuvvWaPUAEAAAAAAAAgzeyyyVX16tX10UcfadSoUbJYLJoxY4Zmzpypp556StWqVZOXl5ck6fr169q/f7/27NkjwzDMjY8++OADVa9e3R6hAgAAAAAAAECa2SXBKkkjRoyQs7OzRowYoejoaBmGoT179mjPnj1J2sYnVnPkyKFPPvlEn376qb3CBAAAAAAAAIA0s0uJgHhDhgzRrl271KlTJzk5OZmrVB98ODk5qXPnztq5cyfJVQAAAAAAAADZlt1WsMarWrWq5s2bp/DwcO3atUvHjx/XzZs3JUmenp4qW7asatasKTc3N3uHBgAAAAAAAADpYvcEazw3Nzf5+fnJz88vq0IAAAAAAABAGjg6OqpSpUqJzgHE4acBAAAAAAAAVrm6uqpjx45ZHQaQLdm1BisAAAAAAAAAPE7snmCNiYnRrFmz1LVrV5UuXVq5c+dWjhw5NGbMmCRtf/vtNy1atEjbtm2zd5gAAAAAAAAAkCq7lgjYvXu3unfvrtOnT5vPGYYhi8WSbPupU6fq999/l5eXly5cuEB9DwAAAAAAAADZit1WsG7dulUNGzbU6dOnZRiGDMNQ/vz5rfYZOHCgDMPQ9evXtWrVKjtFCgAAAAAAgIRiYmJ09uxZ8xETE5PVIQHZhl0SrBEREerUqZPu3bsnSRo8eLD++ecfXb582Wq/Jk2aKG/evJKklStXZnaYAAAAAAAASMbdu3cVFBRkPu7evZvVIQHZhl0SrD/++KMuXboki8WiMWPG6KuvvlKRIkVS7ZcjRw7Vrl1bhmFoz549dogUAAAAAAAAANLOLgnWpUuXSpLKli2rQYMGpatvxYoVJUknTpzI8LgAAAAAAAAA4GHYJcF6+PBhWSwWtWzZMt198+XLJ0m6detWBkcFAAAAAAAAAA/HLgnWGzduSJIKFy6c7r6GYWR0OAAAAAAAAACQIeySYM2dO7ckKSwsLN19//nnH0mSl5dXhsYEAAAAAAAAAA/LLgnWEiVKSJL279+f7r7r16+XxWKRr69vRocFAAAAAAAAAA/FLgnWZ599VoZhaNWqVbp8+XKa+/355586evSoOQYAAAAAAAAAZCd2SbAGBATIYrEoMjJSvXv3VkxMTKp9Tpw4od69e0uSnJ2d1bNnz0yOEgAAAAAAAADSxy4J1ipVqqhHjx4yDEN//vmn/Pz8tHHjxmQ3sDp16pRGjx6tWrVq6dKlS7JYLHr99ddVtGhRe4QKAAAAAAAAAGnmaK+JJkyYoGPHjmnbtm3aunWrGjduLFdXV/P68OHD9emnn+ru3buSZCZfGzdurM8//9xeYQIAAAAAAABAmtllBaskubi4aO3aterTp4+kuATq3bt3ZbFYJElhYWGKiIiQYRgyDOP/2rvv+Cjq/I/j700HElroEALSgoBUAQFpUkUFaaJgQEAQy/FTbJwi2NA7OeVAKechIIogcCKgAioBCyBVmvQaqtTQQkny/f2Ry9wu2WySzWZ3A6/n47EPZjLf+c5n5juTyX74zndks9k0cOBAffvttwoMDPRWmAAAAAAAALhBcHCwGjdubH2Cg4N9HRLgN2zG2XP6uWznzp2aOHGili1bpm3btjkMFVCxYkW1bdtW
Tz75pO644w5vhwYA8JLDhw8rKipKkhQfH69y5cr5OCIAAAAAwM0st76Hem2IAHvVqlXT2LFjJUkpKSk6c+aMkpKSFBkZmeH/gCQlJSkoyCfhAgAAAAAAAIBTXhkiYOHChRkHEBCgYsWKqVSpUi6Tqz179syt8AAAAAAAAADALV5JsPbq1UsrV650a93k5GT17NlTX3/9tYejAgAAAAAAAICc8UqCNTExUffff7+2bduWrfWSk5PVq1cvzZ8/P3cCAwAAAAAAQKauX7+urVu3Wp/r16/7OiTAb3glwRoYGKhz586pQ4cOio+Pz9I6ycnJevjhhzVv3jxJsgagBQAAAAAAgHddvXpV8+bNsz5Xr171dUiA3/BKgnXixIkyxujo0aNq166dTp065bJ8cnKyHnnkEc2dO1dSanI1Li7OG6ECAAAAAAAAQJZ5JcE6cOBAvfnmmzLGaNeuXerUqZMuXbrktGxKSor69OmjOXPmSJLKlSunuLg4VaxY0RuhAgAAAAAAAECWeSXBKkmvvPKKnn76aRljtG7dOnXt2lVJSUkOZdKSq7Nnz5YklS1bVnFxcbrtttu8FSYAAAAAAAAAZJnXEqySNG7cOPXs2VPGGP3www+KjY21lqWkpCg2NlazZs2SlJpcXbZsmSpVquTNEAEAAAAAAAAgy7yaYJWkGTNm6J577pExRrNnz9bQoUNljFFsbKxmzpwpSSpTpox+/PFHValSxdvhAQAAAAAAAECWeT3BGhwcrPnz56t+/foyxujDDz9U7dq19cUXX0iSSpcurWXLlqlq1areDg0AAAAAAAAAssXrCVZJKlCggL799ltVrlxZxhht3bpVxhiVLFmS5CoAAAAAAACAPMMnCVZJKl68uJYuXarSpUtLkkqWLKnly5erWrVqvgoJAAAAAAAAALIlyJOV9e/fP9vrVKhQQcePH1flypX1t7/9LcNyNptNU6ZMyUl4AAAAAAAAAOBRHk2wTps2TTabza11V65cqZUrV7osQ4IVAAAAAAAAgD/xaIJVkowxnq5SktxO3AIAAAAAACBnQkJC1KZNG4d5AKk8mmCNi4vzZHUAAAAAAADwAyEhIWratKmvwwD8kkcTrC1atPBkdQAAAAAAAADg1wJ8HQAAAAAAAAAA5FUkWAEAAAAAAADATR5/yRUAAAAAAABuLteuXdOGDRus+Xr16vGiK+C/fJZgNcYoISFBFy9eVEpKSpbWKV++fC5HBQAAAAAAgBtdu3ZNS5YsseZr1qxJghX4L68mWBMSEjRp0iT95z//0ZYtW3T16tUsr2uz2ZSUlJSL0QEAAAAAAABA9ngtwbpy5Up1795dJ06ckJTagxUAAAAAAAAA8jKvJFiPHTume++9V+fPn7d+FhERocqVKysiIkI2m80bYQAAAAAAAACAR3klwfr3v/9d58+fl81mU5UqVfTPf/5T7dq1I7EKAAAAAAAAIE/zSoJ18eLFkqTChQvrp59+UokSJbyxWQAAAAAAAADIVQHe2Eh8fLxsNpt69OhBchUAAAAAAADATcMrCdbg4GBJUoUKFbyxOQAAAAAAAADwCq8kWNMSqwkJCd7YHAAAAAAAAAB4hVcSrF26dJExRitWrPDG5gAAAAAAAADAK7ySYB0yZIiKFy+u3377Td9++603NgkAAAAAAAAAuc4rCdYSJUpo1qxZyp8/vx5++GF99dVX3tgsAAAAAAAAPCA0NFRdunSxPqGhob4OCfAbQd7YyKeffipJevzxxzV27Fh1795dtWvXVseOHRUdHa2wsLAs1RMbG5ubYQIAAAAAAMCJ4OBg1a5d29dhAH7JZowxub2RgIAA2Ww2a94Y4zCfFTabTUlJSZ4ODQDgI4cPH1ZUVJQkKT4+XuXKlfNxRAAAAACAm1lufQ/1Sg9WKTWp6moeAAAAAAAAAPIaryRYR44c6Y3NAAAAAAAAAIBXkWAFAAAAAACAS1evXtXPP/9szd9999286Ar4L68NEQAAAAAA
AIC86fr16/r111+t+caNG5NgBf4rwNcBAAAAAAAAAEBeRYIVAAAAAAAAANxEghUAAAAAAAAA3OSTMVjj4uL0008/aefOnTp37pyuXLmS6To2m00//vijF6IDAAAAAAAAgKzxaoJ1+fLlGjx4sPbs2ZOt9YwxstlsuRQVAAAAAAAAALjHawnWefPmqVevXkpJSZExJtPyNpstS+UAAAAAAAAAwFe8Mgbr6dOn9dhjjyk5OVk2m01/+ctftGrVKg0fPlxSajJ137592rhxo2bMmKHOnTunBhcQoJEjR2r//v3at2+fN0IFAAAAAAAAgCzzSg/WyZMn6+LFi7LZbPrHP/6hoUOHSpJ++eUXq0yFChUkSbVr11bv3r21bNkydevWTW+88YYKFiyoZ5991huhAgAAAAAAAECWeaUH6/fffy9Jio6OtpKrmWndurVmzpwpY4yGDx+urVu35maIAAAAAAAAAJBtXkmw7tixQzabTW3bts2wTHJycrqfdezYUY0aNdL169c1ZcqU3AwRAAAAAAAAALLNK0MEnD17VpJUvnx5h5+HhIRY04mJiQoPD0+3bosWLfTbb79ZvWABAAAAAADgXWFhYerTp4/DPIBUXkmwBgUF6fr16woMDHT4eUREhDV9/PhxVa5cOd26BQsWlCQdPXo0d4MEAAAAAACAU0FBQapUqZKvwwD8kleGCChRooSk//VkTRMVFWVNb9myxem6Bw4ckJTawxUAAAAAAAAA/IlXEqy33367jDHauXOnw8/r1Kkjm80mSZo7d2669S5evKj58+dLksqUKZPrcQIAAAAAAABAdnglwdqsWTNJ0qpVqxx+HhkZqWbNmskYo1mzZmncuHFKSkqSJB0+fFjdunXTqVOnZLPZ1KpVK2+ECgAAAAAAAABZZjPGmNzeyObNm63eqj/++KNatmxpLfvhhx/Url07qydrWFiYwsPDderUKUmSMUYhISFav369atSokduhAgC85PDhw9ZQMfHx8SpXrpyPIwIAAACQkStXrmjx4sXWfIcOHXjRFfKc3Poe6pUerHfccYd69+6tdu3aaePGjQ7L2rRpo5EjR8oYI2OMEhMTderUKWs+MDBQkyZNIrkKAAAAAADgI0lJSdq0aZP1SXsCGYAU5K0NzZgxI8NlI0eOVNOmTTV+/HitXr1aZ8+eVWRkpJo3b64XXnhBDRo08FaYAAAAAAAAAJBlXkuwZqZNmzZq06aNr8MAAAAAAAAAgCzzSoL1/PnzkiSbzaaIiAhvbBIAAAAAAAAAcp1XxmAtXLiwihQpovbt23tjcwAAAAAAAADgFV5JsAYFpXaUbdGihTc2BwAAAAAAAABe4ZUEa6lSpSRJBQsW9MbmAAAAAAAAAMArvJJgjYmJkSQdOHDAG5sDAAAAAAAAAK/wSoL1oYcekjFGCxcu1JUrV7yxSQAAAAAAAADIdV5JsPbt21f16tXTiRMn9PTTT3tjkwAAAAAAAACQ67z2kquvvvpKderU0dSpU9WqVSvFxcXJGOONzQMAAAAAAABArgjyxkZat24tSQoJCZExRj/99JPatGmjfPnyqUqVKipUqJACAlznem02m3788UdvhAsAAAAAAAA7+fLl05AhQxzmAaTySoJ1+fLlstlskmT9a4zR5cuXtXnz5kzXN8ZY6wEAAAAAAMC7AgMDVaJECV+HAfglryRYJWU4HADDBAAAAAAAAADIq7ySYN2/f783NgMAAAAAAAAAXuWVBGt0dLQ3NgMAAAAAAAAAXuW1IQIAAAAAAACQNyUmJmru3LnWfPfu3XnRFfBfJFgBAAAAAADgUnJysvbt2+cwDyBVgK8DAAAAAAAAAIC8igQrAAAAAAAAALiJBCsAAAAAAAAAuIkEKwAAAAAAAAC4iQQrAAAAAAAAALiJBCsAAAAAAAAAuIkEKwAAAAAAAAC4iQQrAAAAAAAAALiJBCsAAAAAAAAAuIkEKwAAAAAAAAC4KcjXAQAAAAAAAMC/FShQ
QC+88II1ny9fPh9GA/gXEqwAAAAAAABwyWazKX/+/L4OA/BLDBEAAAAAAAAAAG4iwQoAAAAAAAAAbiLBCgAAAAAAAABuYgxWAAAAAAAAuHT58mVNnz7dmu/bty9jsgL/RYIVAAAAAAAALqWkpOjPP/90mAeQiiECAAAAAAAAAMBNJFgBAAAAAAAAwE0kWAEAAAAAAADATSRYAQAAAAAAAMBNJFgBAAAAAAAAwE0kWAEAAAAAAADATSRY/cxnn32mNm3aqHjx4goKCpLNZpPNZtPy5cslSaNGjbJ+duDAAZ/G6kk36365q1+/ftbxyKmWLVvKZrOpQoUKOQ8sFyQlJalWrVqy2WwaPny4r8O55R0+fFihoaGy2WxaunSpr8MBAAAAAMDvBfk6APzPoEGD9PHHH/s6jJvStWvXtHnzZq1du9b6bN++XcnJyZKkuLg4tWzZ0rdB3qI++ugjbd26VREREXr++ed9Hc4tr1y5chowYIAmTpyooUOHasuWLQoK4lYB3AqSk5P1888/6+DBg7p48aLCw8MVHR2tu+++W4GBgb4ODwAAAPBbfGv2E+vWrbOSq8WKFdPQoUNVtWpVhYSESJJq1qzpy/DyvLvuuksbNmzwdRi4wcWLF/XWW29Jkp555hlFRkb6OCJI0vDhwzVlyhTt2LFD06ZN08CBA30dEoBcdOLECU2ZMkWTJ0/WoUOH0i0vX768Bg8erAEDBqhkyZI+iBAAAADwbwwR4CcWL15sTY8fP16vvvqqevbsqS5duqhLly4qVqyYpNRH6Y0xMsb47SPf/iitp2qaqKgolSpVykfRIM348eN16tQphYaG6rnnnvN1OPivqKgo9e7dW5L01ltvKSkpyccRAcgtEyZMUHR0tF555RWnyVVJOnTokF555RVFR0drwoQJXo4QAAD4k4CAAOsD4H+4IvxEfHy8NV23bl0fRnJzat26tUaNGqVFixbpxIkTOnTokNq3b+/rsG5p169f1/jx4yVJnTp1oveqn4mNjZUkHTx4UPPmzfNxNAByw6uvvqqnnnpKV69edfh5tWrV1KhRI1WrVs3h51evXtVTTz2lESNGeDNMAADgJ8LDwzVixAjrEx4e7uuQAL/BEAF+wv7LTWhoqA8juTm9//77vg4BN5g/f76OHTsmSerTp4+Po8GNmjdvrnLlyunw4cOaOHGiHnroIV+HBMCDJkyYoLffftuaj4iIUP/+/fXEE08oJibG+vmOHTs0adIkffLJJ7pw4YKk1J7tpUuX1pNPPun1uAEAAAB/RA/W/zp79qymT5+uvn376o477lChQoUUHBysyMhINWrUSK+++qqVDPKUAwcOWG+Knz59uvXzihUrWj+32WwaNWqUtWzUqFHWzw8cOOBQ39SpU61ld999d7rH4u19/fXXVtnatWun672SZtOmTRo6dKhq1aqlIkWKKCwsTFFRUerevbu+/vrrLO/rvHnz1KFDB5UoUUL58uXTbbfdpoEDB2rLli1ZruNmkZKSoilTpqh58+aKjIxUgQIFVK1aNT377LPp2jQzV69e1fvvv6+GDRuqUKFCKliwoGrWrKkRI0bo5MmTWaqjX79+6c6pOXPmqH379ipVqpTy58+v22+/XaNGjVJCQoLDukePHtXw4cNVo0YNhYeHq0iRImrTpo2+++67TLf72WefSZIKFy6sTp06OS1jf75v3brVaZlJkyZZZcLCwnTlyhWn5Z544gnZbDYFBASkOzYtW7a06pCkpKQk/etf/1LLli1VqlQpBQYGOn0J2p9//qmRI0eqYcOGKlasmEJDQ1W2bFl16tRJn3zySaaP1t+4XWOMZs6cqXvuuUclSpRQWFiYKlWqpCFDhmT46O6Ndu7cqcGDB6tixYoKCwtTqVKl1LZtW33xxReSHH/v2P9uuVFAQIB69eolSfrpp5+yvH0A/u/EiRMOw7JER0dr3bp1Gjt2rENyVZJiYmI0duxYrV27VtHR0dbPn3vuOZ04ccJr
MQMAAAB+zcCcOXPGhISEGEkuP/nz5zdffvmlx7a7f//+TLcpyYwcOdJaZ+TIkdbP9+/fn67OHj16WMvfeOMNp9s9evSoiYyMNJJMWFiY2bp1a7oy165dM08++aQJCAhwGVv79u1NQkJChvt47do107NnzwzXDw0NNZ9++mmm+5Ub+vbta20zLi7OK9s8f/68adGiRYbHo1ChQmbp0qUOsWXkyJEjpkaNGhnWVbZsWbNhwwZre9HR0U7rsd/W7t27zcMPP5xhnbVq1TKnT582xhjz008/WeeRs8/777+fYeyXL182YWFhRpLp2LFjhuXi4uKs+saPH++0zI3nV0ZtWbVqVSPJ1KhRI90y+zY5efKkady4cbr9adGihcM6c+bMMQULFnR5fdSsWdPs27cvw/2z3+7ly5fN/fff7/Lc+O233zKsyxhjPv30UxMaGpphHd26dTO7du1y+rvFmW+++SbT458T8fHxVv3x8fEerx+Ac2+//bZ17UVERJidO3dmab0dO3aYiIgIa93Ro0fncqQAAACAZ+XW91CGCFDqC5CuXbumcuXKqU2bNrrjjjtUsmRJ2Ww2xcfHa+XKlVqwYIEuX76sRx55RFFRUWrcuHGOt1uiRAl99dVXkqRx48YpLi5OkjR58mSVKFHCKndjbxJXJk+erFWrVunw4cN644031K5dOzVq1MhaboxR3759dfr0aUnS3//+d9WoUcOhDmOMunfvrgULFkiSypUrp4cffli1atVSaGio9u7dqxkzZmj79u1asmSJHnjgAf34448KDAxMF8+QIUP05ZdfSpLy5cunxx57TI0bN5YxRj///LOmT5+uAQMGqG3btlnex7zKGKOuXbtqxYoVkqQiRYpo4MCBqlOnjq5cuaIlS5Zozpw56tmzp+rUqeOyritXrqhdu3batm2bJKls2bIaMGCAqlevroSEBH311VdasmSJunbtqsKFC2c5xuHDh2vu3LmqW7eu+vTpo3LlyunQoUP66KOPdODAAW3ZskV/+ctf9MYbb+jee+9VcnKyhgwZorvuuksBAQFavHixPv/8cxlj9OKLL6pt27aqWbNmuu38+uuvVk/TO++8M8N4GjdurNDQUF29elVxcXF6+umn05VJO55pli9fnq636bFjx7Rr1y5JctoT1V6fPn20evVq3XnnnXrooYcUFRWlU6dO6fjx41aZhQsX6qGHHlJKSookqW3bturSpYsiIyO1d+9eTZ06VXv27NHWrVt19913a+PGjSpevLjL7fbv318LFy5U/fr11atXL5UvX16nTp3S559/rpUrVyohIUEPP/yw/vjjD6fDiCxdulT9+vVziOnBBx9U0aJFrZjmzZtn9ZbNioYNG1rT33//vdPjDyBvSU5O1qRJk6z5/v37q2rVqllat1q1anrsscc0btw4SalPELz44otO7/8AAADArYQEq6QCBQrohx9+0D333JNhmc2bN6t9+/Y6fvy4XnzxRf3000853m7+/PnVpUsXSanjUaZp166dKlSo4FadRYoU0YwZM3TPPfcoKSlJvXv31u+//24NPv3BBx/o+++/lyR17NhRzzzzTLo6xo4dayVX+/fvr48++khhYWEOZV544QUNGjRIU6dO1YoVKzR58uR0Y7HFxcVpypQpkqRixYpp+fLlDsnc2NhYDRw4UG3bttW3337r1v7mJdOmTdMPP/wgSapcubKWL1+usmXLWsv79++vRx55RN27d9fy5ctd1vXOO+9YydXGjRtr8eLFKlSokLV88ODBmjBhgp566qlsxTh37lw99dRTGjdunMNbIfv166fatWvr6NGjmjVrlrZu3aoCBQpo2bJluv32261yvXv3VrVq1TRixAglJSXpww8/dPgin2bNmjXWtH0S70ZhYWFq3LixVqxYoZ9++knGGIcE4fbt261HVBs3bqzVq1c7PXb2P8sswbpkyRINHz5cb7/9ttNkZEJCggYMGKCUlBTZbDZNmDBBTzzxhEOZ
5557Tr169dLXX3+tI0eO6Omnn9bs2bNdbnfWrFl65ZVX9Oabbzps94knnlC3bt00f/587du3T1999ZX16H6a69eva/DgwVZydfz48emSoc8995x69OihuXPnuozDXrFixXTbbbdp3759Dm2WVYcPH3a53NPDrnjDxYsXdfHiRV+HAbht5cqVDi/WHDJkSLbWHzJkiJVgPXTokL7++ms1adLEozEC3hQeHs5LWgAgiy5evKiJEyda80OGDOF3KJDGY31hbwGffPKJ1Y344MGDHq3b/jFtV4/IZ/VR+pdfftkq169fP2OMMb///rv1+HCJEiXM8ePH062XmJhoihcvbiSZJk2amJSUlAy3ce3aNVOpUiUjyVSuXDnd8vvuu8+KYc6cORnWM3HiRIfHmG/WIQJq1aplJBmbzWbWrFmTYbmXXnrJ4Xjc6MqVK6ZYsWJGksmXL5/Lc/Ghhx6y6snKEAG1atUy169fd1pu9OjRDnFl1KaJiYnWI6QVK1Z0WsZ+GILdu3dnGL8xjuf8pk2bHJZNmDDBSDKRkZFm5syZ1rATiYmJDuUGDRpkHfs///wz3TbsH9Vv3Lixy/P+gw8+sMo+9thjGZa7cOGCKVu2rJFkAgICnA4VYL/d1q1bZ1jX7t27XW7zyy+/dBgGICNnz551GNYhsyECjDHm3nvvdRg+ITvsz5fMPnlliAD785EPn7z+qVatmlvXQbVq1XweOx8+nvpk5V4IAEh14cIFM2rUKOtz4cIFX4cEZFtuDRHAS66ywb6Hhju9ubzpjTfeUIMGDSSl9pycMWOGHnnkEetlVp988olKliyZbr3FixdbLwB67rnnXD5OHBwcbL1ZfM+ePQ4vaEp75F2Sypcvr27dumVYT//+/VWkSJHs7WAes2/fPuuFXk2bNnX5WPz//d//uXzc8tdff9WpU6ckSd26dVP58uUzLDts2LBsxTl48GAFBTnv2N60aVNrumTJkuratavTcmFhYda5d+DAASUmJqYrY//CpKJFi7qMyb7H6Y29U9OG1WjevLlatWolKfXFX6tWrXJa7vbbb8/0Uf0hQ4a4PO/ThvWQpBdffDHDcuHh4Vav7pSUFKtXeEaGDh2a4bLKlSsrKipKkvTHH3+kW27/wrm//OUvGdZTuHBhPfrooy7juJH9tXnw4MFsrQvAv2VnCBl79k9MAAAAAGCIAAcHDx7UtGnTtHz5cu3YsUPnzp3L8I3kR44c8XJ02RMcHKyZM2eqbt26unTpkmJjY61lTz75ZIZvbf/555+t6dOnTzsMXeDMmTNnrOnt27dbQxts2rRJ169fl/S/N6VnJCQkRE2bNtWiRYsy2608a+3atdZ069atXZYtVaqUqlevrq1bt+a4rgYNGqhgwYI6f/58luJ09bi+fUK+fv36DkMIZFTWGKNz584pX758DsvTzhubzZbpF/wbx2G1TyCmjb/aqlUrlSpVSjExMdqxY4eWL19uJVyPHj2q3bt3S8p8eABJLh91NcZo3bp1klLHJs5sfOS2bdvqlVdekST99ttvLstmNq5z2bJlFR8fr7Nnz6Zbtn79ekmp19Jdd93lsp4WLVpo7NixLsvYs0+AO9u2K/aPITtz7Ngxl+ccgNx17tw5t9ZLSEjwbCAAAABAHkeC9b/Gjh2rl19+2erhmZmsJqx8qUqVKvrnP/+pgQMHWj+7/fbbNWbMmAzXse+hNnjw4Gxtzz75cvToUWu6cuXKma6blTJ5mTvHI6MEa3bqstlsuu222/T7779nKc7IyMgMl9m/WMlVuRvLOrum0n6WP39+l4laKeNxWP/44w/9+eefkv6XOG3ZsqWVYE2TnfFXJTmMi3ujhIQEXb58WVLq9ZUZ+xfH2L8ky5lixYq5XJ52TJ0dz7Rzoly5cgoODnZZz2233eZy+Y0KFixoTTvrjexKuXLlslU+L3j++efTjbkL5CUrV660nirZuXOnduzYka2XaW7fvl07d+605ufNm8cYrMjTGDsQ
AAB4AglWSZ9//rmeffZZSVJAQIDuueceNWnSROXLl1d4eLhCQkIkSX/++aeVdExOTvZZvNlxYxLo3nvvTdeb0F5OeqVcu3bNmr506ZI1nT9//kzXLVCggNvbzQs8eTxy89hmluzMbrmMpCULL1++rOTk5EzfQN2yZUutWLFCZ86c0aZNm1SnTh0rcVqsWDHVrFnTKjdp0iT99ttvunLlisLCwqxyNptNLVq0yDQ2V9eH/cuNsnJc7ctcuHDBZdmcHNO0cyI3rjX73wmujs2tgpehIK/r3Lmzypcvbw3VMmnSpGz1ard/cWH58uXVuXPnTH+HAwAAADc7xmCVNGLECElSRESEVq9eraVLl2rUqFHq37+/evbsqS5duqhLly4OY1DmBQkJCQ5DA0ipPXVdPapsnzg4evSojDFZ/vTr189a1z6Jk9bjzxX7pOHNyJPH42Y4tmmPnRtjsvTYubNxWNPGVW3RooU1BEVauatXr2rlypUO5bIy/mpm7K+PrBxX+zIRERE52rYraedEbpwP9sOAZDZeLgD/FxgY6PCEyieffOLQI9WVnTt3aurUqdb8E088QXIVAAAAEAlW7du3T/v375ckDRo0yOXLh+xf4pQXDBkyxHrkv0uXLrLZbEpKSlKfPn0ceuLZs388+vDhw25vu0yZMtb0nj17Mi2flTJ5mSePR3bqMsZo3759WYjQu9LG6pUcE3gZSRuHVfpfwtR+/NU0JUuWVPXq1SWlJmKPHDliHaOsDA+QmUKFClm9RNPGdXXFvkzp0qVzvP2MpJ0Thw8ftsY+zkh2zwf7BHh0dHT2gwPgdwYMGGD9Tr1w4YLat2+faZJ1586dat++vdUbPzQ0VAMGDMj1WAEAAIC84JZPsJ44ccKazmw8y8WLF+d2OB4zY8YMffHFF5Kku+66S3PnzrXeKL9nz54M3zTevHlza3rp0qVub7927drWWJDLly+XMSbDsteuXdOvv/7q9rbyAvvEfVqCMCPHjx/X9u3bPVLX+vXr/XK84LRH+iVlqedU2jisUuqL2LZs2aKTJ09KckywSv9LpC5fvjzb469mxmazqUGDBpJSk5k7duxwWf7777+3pnPzZU7169eXlHotrVq1ymXZtMR0VqXtY+nSpTMdexdA3lCyZEm9//771vzBgwd15513aujQoel+r+3YsUNDhw7VnXfe6TBO+/vvv68SJUp4LWYAAADAn93yCVb7MQtd9QY8dOiQpk2b5oWIcu7AgQN6+umnJaU+lvzZZ58pMDBQb7/9turUqSNJmjp1qubNm5du3XvvvddKoowfP95KYmVXWFiYOnToICn12H311VcZlp02bVq2306e19x2222qVauWJOmXX36x3vruzLhx41yO8dusWTPrhUjz5s1z2dPY/gu0P2nUqJE17WrICntpCdKzZ89a4wWWKFFCt99+u9Nya9as0XfffScp6+OvZkXXrl2t6ffeey/DcpcuXdKECRMkpY6v+sADD3hk+8507tzZmh43blyG5c6dO6cZM2Zkud6TJ09aPfxzM0EMwPuefPJJvfrqq9b8hQsXNG7cOFWvXl0xMTFq1KiRYmJiVL16dY0bN85hHOlXX31VTz75pC/CBgAAAPzSLZ9grV69ujV+4b///W/t3bs3XZmjR4/qgQceyPCxen+SnJysPn36WL0Wx48fb701PCQkRDNnzrReVDNo0CAdOXLEYf3w8HC99tprklJ793bo0MHl0AjGGP344496++230y1Le3GYlDpcgbNemWvXrtULL7yQvZ3Mo9KOhzFGjzzyiI4dO5auzKJFizRmzBiX9YSEhFhfbC9fvqyHHnrIaS/VyZMnW72Y/U2TJk2s627NmjVZWse+B+qnn36a7mc3lrt69apmz54tyTPjr6Z57LHHrLqmTp2qjz/+OF2Zq1evKjY21kp+9+jRQxUrVvTI9p3p3LmzNezCvHnz9OGHH6Yrc+XKFT366KM6ffp0luu1b5t27drlOE4A
/uXNN9/URx99ZA0XkGbnzp1as2ZNuicMQkND9dFHH+nNN9/0ZpgAAACA3wvydQC+FhISosGDB+v9999XQkKC6tSpo0GDBql27dqy2Wxau3atpk+frvPnzys2NtZK7Pirt99+23rcvmfPnurbt6/D8urVq2vMmDF66qmndObMGfXt21fff/+99ZIgSfrLX/6iNWvW6PPPP9eGDRtUrVo1denSRXfffbdKliyp69ev68SJE9q0aZO+//57HT16VPfcc49eeeUVh221atVKAwYM0JQpU/Tnn3+qfv366t+/vxo3bixjjH7++WdNnz5dxhjde++9+vbbb3PtuGzcuDFdj92NGzda01OmTNEPP/zgsPz5559X4cKFPRZDv379NHPmTP3www/atWuXatasqYEDB6pOnTq6cuWKli5dqtmzZ6tQoUKqU6eOw+PtNxo+fLjmzZunbdu2aeXKlapRo4YGDhyomJgYJSQkaP78+fruu+9UoUIFFS5cWL///rvH9sMTQkND1aFDB82bN0+rV6/WlStXFBYW5nKdtHFYr169qqSkJEnOE6xpvVr/+OMPl+XcVbBgQU2ZMkVdunRRSkqKBg0apLlz5+rBBx9U0aJFtXfvXk2dOtUaf7Vs2bIaP368x7bvTEhIiCZPnqyOHTsqJSVFzzzzjBYuXOgQ0yeffKI9e/aoe/fumjt3rqTUnrWupA0nYLPZdP/99+fqPgDwjSeffFLdunXTJ598okmTJunQoUPpypQvX15PPPGEBgwYwLAAAADcwmw2mwoWLOgwD+C/DExiYqJp1aqVkZThZ/DgwWbv3r3W/MiRIz0aQ9++fa269+/fn2G5kSNHZlhu1apVJigoyEgy5cqVM2fOnMmwnk6dOln1jBkzJt3y5ORkM3LkSBMSEuLyuKR9YmNjnW7n2rVrpkePHhmuFxoaambMmOFyvzxh6tSpWdoP+09uxHH+/HnTvHnzDLdZuHBh8/333zucDxk5cuSIqVGjRoZ1lS1b1mzcuNG0aNHCSDLR0dFO68nqubd//36rXN++fV3uZ1bq/Oqrr6wyc+bMcVlfmrR9Sfts377dabknn3zSoVxm9dvXm1Vz5swxERERLs+hGjVqmH379nlku5m1ozHGTJ8+3YSGhmYYT7du3cwff/xhzb///vsZ1pWSkmKioqKMJNOyZctM43NHfHy8FUt8fHyubANA1iUlJZm4uDgzffp08+GHH5rp06ebuLg4k5SU5OvQAAAAAI/Ire+ht/wQAVLqeKFLly7VhAkTdNdddykiIkKhoaGKjo5Wz549tWTJEk2aNCnT3l6+dPHiRfXp00dJSUkKCAjQjBkzVKRIkQzLf/LJJypZsqQk6a9//as2bdrksDwgIECjRo3S3r179dprr6lp06YqUaKEgoODlT9/fkVHR6tDhw566623tGnTJk2fPt3pdoKDg/Xll19q7ty5ateunSIjIxUaGqqKFSuqf//+Wrt2rfr06eO5A+HnIiIiFBcXp3//+99q1qyZChcurPz586tq1aoaOnSoNm7cqDZt2mSprjJlymj9+vX6xz/+oQYNGigiIkLh4eGqUaOGXnnlFW3cuNEac9cf3XfffSpXrpwk6bPPPsvSOvY9UUuVKqWYmBin5exffOXJ8Vftde/eXXv27NGIESPUoEEDFSlSRMHBwSpdurQ6dOigKVOm6Pfff8/VoQFuFBsbq02bNmngwIGKjo5WaGioSpQooXvuuUczZ87U3LlzHYaTKFq0aIZ1rVixQvHx8ZJSh/gAcPMLDAxUy5YtFRsbq6eeekqxsbFq2bKlAgMDfR0aAAAA4Ndsxrh4vTsA5KL33ntPL774okJCQnTkyBHr5V3IPR9++KGeeeYZSdL69etVr149p+UGDBigTz75RNHR0dqzZ4+Cgjw/oszhw4cVFRUlSYqPj7cS7gAAAAAA5Ibc+h7qv10yAdz0nnzySRUvXlzXrl3TP/7xD1+Hc9NLSkrSv/71L0mpvVdr
1arltFx8fLzVq/jVV1/NleQqAAAAAAA3CxKsAHymQIECGjFihKTUnpXZecM90kt7KZUz165d0+OPP64tW7ZISn3pWnBwsNOy7777rq5du6aYmBj169cvN0IFAAAAkMcYY5SSkmJ9eCAa+B+GCADgU0lJSapbt662bt2ql19+We+8846vQ8qzbDabqlatqo4dO6pGjRoqUqSILl++rK1bt2rWrFnWmKpRUVHasmWLChUqlK6OI0eOqFKlSrp69aqWLFmidu3a5Vq8DBEAAAAA5B0XL150ePJw2LBhCg8P92FEQPbl1vdQnvvMofnz57u9bkxMTIYv6YF06NAhbdiwwe3127Vrp/z58980cdysgoKCrF6VyLldu3Zp165dGS6vVq2aFi1a5DS5Kklly5bVlStXcis8AAAAAABuOiRYc+jBBx90e92RI0dq1KhRngvmJrNs2TI99thjbq+/f/9+VahQ4aaJA8jMd999pwULFmjdunU6fvy4Tp8+reTkZBUrVkx169ZVly5dFBsbm+HQAAAAAAAAIPtIsALATaJDhw7q0KGDr8MAAAAAAOCWQoI1hxjCNvf069fPL16w4y9xAAAAAAAAwP8E+DoAAAAAAAAAAMirSLACAAAAAAAAgJtIsAIAAAAAAACAm0iwAgAAAAAAAICbSLACAAAAAAAAgJtIsAIAAAAAAACAm4J8HQAAAAAAAAD8W0BAgMqUKeMwDyAVCVYAAAAAAAC4lD9/fj3++OO+DgPwS/x3AwAAAAAAAAC4iQQrAAAAAAAAALiJBCsAAAAAAAAAuIkxWAEAAAAAAOBSSkqKLl68aM2Hh4fzoivgv0iwAgAAAAAAwKXLly/rgw8+sOaHDRum8PBwH0YE+A/+qwEAAAAAAAAA3ESCFQAAAAAAAADcRIIVAAAAAAAAANxEghUAAAAAAAAA3ESCFQAAAAAAAADcRIIVAAAAAAAAANxEghUAAAAAAAAA3ESCFQAAAAAAAADcRIIVAAAAAAAAANxEghUAAAAAAAAA3BTk6wAAAAAAAADg3wIDA1WtWjWHeQCpSLACAAAAAADApXz58qlXr16+DgPwSwwRAAAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbmIMVgAAAAAAALiUnJysEydOWPMlS5bkRVfAf5FgBQAAAAAAgEuJiYn6+OOPrflhw4YpPDzchxEB/oMhAgAAAAAAAADATSRYAQAAAAAAAMBNJFgBAAAAAAAAwE0kWAEAAAAAAADATSRYAQAAAAAAAMBNJFgBAAAAAAAAwE0kWAEAAAAAAADATSRYAQAAAAAAAMBNJFgBAAAAAAAAwE0kWAEAAAAAAADATUG+DgAAAAAAAAD+LSgoSPXq1XOYB5CKqwEAAAAAAAAuhYWF6f777/d1GIBfYogAAAAAAAAAAHATCVYAAAAAAAAAcBMJVgAAAAAAAABwE2OwAgAAAAAAwKWkpCTt3bvXmq9UqRIvugL+iysBAAAAAAAALl25ckWzZs2y5ocNG6bw8HAfRgT4D4YIAAAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADcF+ToAAMCtKSkpyZo+duyYDyMBAAAAkJlLly4pISHBmj9y5IgKFCjgw4iA7LP/7mn/nTSnbMYY47HaAADIorVr16phw4a+DgMAAAAAcAtas2aN7rzzTo/UxRABAAAAAAAAAOAmerACAHziypUr2rJliySpePHiCgry7ag1x44ds3rUrlmzRqVLl/ZpPEiPNvJvtI9/o338H23k32gf/0cb+Tfax7/dSu2TlJSkkydPSpJq1aqlsLAwj9TLGKwAAJ8ICwvz2OMYnla6dGmVK1fO12HABdrIv9E+/o328X+0kX+jffwfbeTfaB//diu0T4UKFTxeJ0MEAAAAAAAAAICbSLACAAAA
AAAAgJtIsAIAAAAAAACAm0iwAgAAAAAAAICbSLACAAAAAAAAgJtIsAIAAAAAAACAm0iwAgAAAAAAAICbbMYY4+sgAAAAAAAAACAvogcrAAAAAAAAALiJBCsAAAAAAAAAuIkEKwAAAAAAAAC4iQQrAAAAAAAAALiJBCsAAAAAAAAAuIkEKwAAAAAAAAC4iQQrAAAAAAAAALiJBCsAAAAAAAAAuIkEKwAAAAAAAAC4iQQrAAAAAAAAALiJBCsAIM+5du2a1q1bp4kTJ6p///6qVauWgoKCZLPZZLPZtHz5cp/UZV/nxIkT1bJlS5UqVUphYWEqX768unXrpoULF/qsLm/y9+O6bNky9evXTzExMYqIiFBwcLAiIyPVuHFjvfTSS9q9e3eW67pw4YLee+89NW7cWMWKFVO+fPlUsWJFPfroo/rpp5+yu5te4e/tI0nGGC1YsEB9+vRRlSpVFBERoYiICFWuXFkdOnTQ3//+9yy1U168hvJC+9g7d+6cypQpY8Vns9myvG5evH4k/22jAwcOaOLEierVq5eqV6+uiIgIhYSEqESJEmrZsqVGjx6tEydOZCuuvNhG/to+/l6Xv4qLi9Ojjz6qSpUqKX/+/IqIiFBMTIyeeeYZbd68OVt13er3/9zgyfaRbu37f27wdPvYuxXv/y4ZAADymHr16hlJGX7i4uJ8Upcxxuzdu9fUqlXLZZ1du3Y1iYmJXq3L2/z1uF68eNF06dLFZT2STHBwsBk9enSmca1du9ZER0e7rOupp54yycnJ2drf3Oav7ZNm165dpmnTppm209ChQ70al7f4e/vcaODAgenqy4q8ev0Y459t1Llz50yvGUkmIiLCTJs2LUtx5dU28sf28fe6/NH58+dNt27dXO5fUFCQGTVqVKZ1cf/3PE+2T5pb/f7vSbnRPje6Fe//rpBgBQDkObVr13a4AUdFRZlSpUq59cXJk3WdPn3aVKlSxVq3evXq5r333jNffPGFefPNN01UVJS1rFu3bl6ryxf89bjed999VtmwsDAzaNAg89FHH5mZM2eav/3tb6ZZs2YOcX/44YcZ1rVv3z5TvHhxq2zDhg3NuHHjzMyZM83w4cNNZGSktWzYsGFZ3l9v8Nf2McaYbdu2mZIlS1rrNG7c2Lz11lvms88+M7Nnzzb//Oc/TWxsrImMjHT5BSsvX0P+3D43iouLMzabzQQEBJiwsLAsf8HKy9ePMf7ZRpUqVbLKNWrUyLz88stmypQpZvbs2eZvf/ubqVu3rkPMmSVZ83Ib+WP7+HNd/uj69eumTZs21j4UKFDAPP3002bGjBnm888/N88//7wpUqSItfztt992WR/3f8/ydPsYw/3fk3KjfW50q97/XSHBCgDIc5599lkzatQos2jRInPixAljjDF9+/Z164uTJ+t6+umnrfU6dOiQ7n/FT58+7fAF9z//+Y9X6vIFfzyuy5cvd/iyfeDAAafl/vWvf1nlIiMjzfXr152Ws/+y1r9//3T/y37gwAFTvnx5I8nYbDazfv36LO9zbvPH9jHGmEuXLllJogIFCrgsm5SUZI4ePeqVuLzNX9vnRomJidaX2KeeesqhN0pm8vL1Y4x/tlGNGjXMM888Y3bt2uV0eXJysnnhhResegoVKmROnz6dYVx5uY38sX38uS5/9OGHH1qxlytXzuzevTtdmaNHj5qYmBgjpfbE2759u9O6uP97nifbxxju/57m6fa50a18/3eFBCsA4Kbg7hcnT9V1/PhxExwcbP1hmPaF7kZbtmwxNpvNSDK1a9fO9br8ia+P61//+ldr+x999JHL7davX98qu3nz5nTL169fby0vX758ho+YffPNN1a5zp07u9ymr/m6fYwxDsmfBQsWuLEXuROXP/CH9rnRyy+/bCSZMmXKmISEhCx/wboZrx9jfN9GZ86cydI2GzZsaMU5ZcoUp2Vuxjbydfv4a13+qlq1alZ7LVy4MMNyv/32m1WuV69eTstw//c8
T7aPMdz/Pc3T7XMj7v/O8ZIrAAA8YP78+bp+/bok6eGHH1aJEiWclqtZs6Zat24tSdq0aZPTQfo9WVde58ljcfLkSWu6SpUqLrdbtWpVa/rSpUvpls+ZM8eaHjRokMLCwpzW07FjR1WuXFmStHjxYl24cMHldvMaT7bPxYsXNWnSJEnSPffco/vvv98v4srLcvM4bNq0SWPGjJEk/fOf/1TBggWzHBfXz/94so2KFCmSpW127drVmt6yZYvTMrRRKn+9t9/sv+OOHj2qnTt3SpKKFi2qTp06ZVi2YcOGiomJkSQtWLBAly9fTleG+79nebp9uP97lqfb50bc/zNGghUAAA9YunSpNd2hQweXZe2XL168OFfryus8eSzs/8jO7A/ptOWBgYEOX7ayG5fNZlP79u0lSVevXnXrzdX+zJPtM2/ePOsP6N69e/tNXHlZbh2H5ORkDRw4UElJSerUqZO6d++eK3Hd7NeP5JtzNSIiwpq+cuVKjuK62dvIX+/tN/vvuMOHD1vTVapUyfTN5Gn36cuXL2vFihXplnP/9yxPtw/3f8/ydPvY4/7vGglWAAA8YOvWrdZ0/fr1XZZt0KCBNb1t27ZcrSuv8+SxeOCBB6zpd999VwcPHnRaz8cff6x169ZJkh599FEVLVrUYXlKSoq2b98uSQoKClLt2rVzFFde5sn2+fnnn63phg0b6sqVK3r//ffVoEEDFSpUSOHh4YqJidETTzyR6XHkGkqVW8fhgw8+0Lp165Q/f3599NFH2YqJ68eRL85V+22WL18+3XLa6H/89d7O77iM2R+bNNz//Yez9uH+7z+ctY897v+uBfk6AAAA8rqUlBTt3btXUmqPh3LlyrksHx0dbU3f2JPCk3XldZ4+Fg0bNtTTTz+tDz/8UPHx8YqJiVFsbKzq1KmjIkWKKD4+XgsWLNAvv/wiSXrwwQc1fvz4dPXEx8crMTFRklS2bFkFBbn+c+pmbSNPt0/al1pJSkpKUv369fXHH384lNm5c6d27typjz/+WKNGjdKIESNyPa68KreOw759+zRy5EhJ0qhRoxzWywqun//xxbl6/vx5zZ4925p31oOINkrlr/f2W+F3XKlSpazp3bt3yxjjshferl27rOm0R6Ptcf/3LE+3D/d/z/J0+6Th/p85EqwAAOTQhQsXlJycLEkqXLhwpn8wREZGWtPnzp3Ltbryutw4FuPHj1fFihU1evRonT59Wv/617/SlalXr57eeOMN3XvvvU7/ILWvu1ixYpnux83aRp5un+PHj1vTPXv21K5du1SuXDkNHDhQMTExSkhI0IIFC/TNN98oJSVFr732msLCwvTCCy/kalx5VW4dh8GDB+vy5cuqXbu2nn322WzHxfXzP744V//617/qzJkzkqT27durbt266crQRqn89d5+K/yOK1++vMqVK6fDhw/rzJkz+vbbbzMcR3LdunXasWOHNc/9P/d5un24/3tWblw/Evf/rGCIAAAAcsj+JQgZDdZuL1++fNb0xYsXc62uvC63jsXgwYM1evToDAfl37Bhg959912tWrXKq3HlNZ4+DvZ/PO/atUtNmjTRH3/8oZEjR+qhhx7SoEGDtGjRIv373/+2yr3yyivpHvWkfVLlxnGYNm2afvjhBwUEBGjy5MmZfnn1Vlx5lbePxezZs61HOsPDw5320PNFXP7KX+/tt0r7PP7449b0kCFDtG/fvnRlTpw4ob59+zr8zNXLcLj/e44n24f7v+d5+vrh/p819GAFAHjc2LFjPfI/jRUqVFC/fv1yXA/Su1XbaO3aterSpYuOHj2qOnXq6LXXXtPdd9+tQoUK6dixY1q0aJFGjRqlX375Rffcc49mzZqlzp07ez3OW7F9UlJSrOng4GB98cUXDi/jSTNgwAAtWbJEc+bM0fXr1zVp0iS988473gz1lmyfP//8U8OGDZMkPfHEE2rUqJGPI3LtVmwjV9asWaP+/ftb85MnT870beq5ifa5
OeRmOz733HP68ssvtW3bNsXHx6tOnTp67LHH1LBhQwUEBOj333/Xv//9b505c0a33XablUAKCHDehyyv3P89Ka+0T166/3tSXmmfvHb/9ykDAICHRUdHG0k5/rRo0SLL2+zbt6+1XlxcXI7iz25d586ds8pHRkZmWj4hIcEqX69evVyry5W80EaePhabNm0y+fLlM5JMkyZNTGJiotN69u3bZyIjI40kExERYY4dO+aw/Pfff7e2U79+/Uzj2rRpk1W+a9eumZY35tZsnyJFiljLO3Xq5LKupUuXWmUbNmyYq3E5cyu2z0MPPWQkmdKlS5tz5845rcP+uGTEG9fPjbHcKm2Uka1bt1q/0ySZ9957z2V5fsel8td7u7fOm6zI7XaMj483DRo0cLluhw4dzKxZs6z5Xr16pasnL93/PSmvtE9euv97Ul5pn7x2//clhggAACCHIiIiFBgYKCn1MaekpCSX5U+fPm1NFy5cONfqyus8fSxefvlla3D9999/P8PHkypWrKjnn39eUuqjUtOmTXNYbl/3qVOnMtuNm7aNPN0+9j+rV6+ey7rsl6e90CK34sqrPHkcFi1aZL0YaezYsSpUqJDbcXH9/I83ztXdu3erTZs21rqvv/669fstI7RRKn+9t99Kv+PKlSun1atX67PPPtN9992nUqVKKSQkRJGRkWrdurU+++wzffvtt7p8+bK1jv0LftJw/88dnmof7v+5wxPtw/0/exgiAADgcQcOHPB1CF4VEBCgSpUqadeuXUpOTtbhw4dVoUKFDMvbjxl14yOanqzLlbzQRp48FlevXtUPP/wgKfUP8IYNG7rcdps2bTR8+HBJqY/W2ouKilK+fPmUmJioI0eOKCkpyeVYVO600a3WPpJUrVo17d+/X5Iy/QPefnlCQkKuxuXMrdY+U6ZMkSQVLVpUu3bt0ltvveW0Dvu2sC/zzDPPWG3mjetHuvXayJkDBw6odevW1gtkXn75Zb322muZrsfvuFT+em/31t8JWeGNdgwMDFTv3r3Vu3fvDMvYv3G+QYMGDsvy2v3fk/JC+0h56/7vSXmhffLi/d+XSLACAOABNWvW1K5duyRJ69evd/kH3bp16xzWy8268jpPHYtTp07p+vXrklK/YDl7O7A9+z/g7Qfll1L/gK9evbo2bNigpKQkbdq0SfXr13crrrzOk+fqHXfcocWLF0uSzp8/73K79n/IO/syxjWUylPHwRgjSTpz5oxGjBiRpW3bl+vTp4/VTlw/jnLrXD18+LBat26tw4cPS5KGDh2a5bEKaaP/8dd7O7/jHK1YsUKSZLPZ1KxZM4dl3P99z1X7SNz/fc1V+3D/zx6GCAAAwAPat29vTS9ZssRl2bQ/Im9cLzfqyus8dSzsX5Zw6tQpXblyxWVd9v9rHhkZ6XZcxhhreWhoqFq0aOFyu3mNJ8/VDh06WNPr1693WdeGDRus6apVq+ZqXHmZvx4Hrp//yY02On78uFq3bm31CBs8eLDGjh2bK3Hd7G3kr/d2f722fWHbtm1au3atJKlVq1aKjo52WM7937cyax+J+78vZaV9POmmv358OP4rAAAe48uXXBljzPHjx01wcLCRZMLDw82JEyecltuyZYux2WxGkqldu3au1+VPfH1co6KirO1//vnnLrc7YMAAq+w//vGPdMs3bNhgLS9fvnyGL8z45ptvrHKdO3d2uU1f83X7JCUlmTJlyhhJJjg42Bw8eDDD7fbo0cOKddSoUbkal7/wdftkRVZecmHMzXn9GOMfbXTy5ElTo0YNK47HHnvMpKSkZHtfbsY28nX7+GtdeVlSUpJp3bq11a7ffvut03Lc/30jq+3D/d83sto+WXGr3//TkGAFANwUfJ1gNcaYp59+2lqvY8eO6f5oOHPmjKlbt65V5j//+Y9X6vIXvj6uL7zwglWmWLFiZtOmTU7LzZgxw/qjOzQ01Bw6dMhpufvuu8+qb8CAASY5Odlh+cGDB0358uWN
JGOz2cz69euzvM++4Ov2McaYyZMnW+WaNGlizp8/n67MlClTrDL58+fP8MvTzXYN+UP7ZCarX7CMufmuH2N830Znz551KPfII4+kO67ZcbO1ka/bx5/r8lc///yzSUpKcrosISHB9OrVy9q/hx9+OMN6uP/nDk+1jzHc/3ODJ9snM7f6/T+NzZj/DqoAAEAesXHjRs2bN8/hZwsXLtTmzZslpY73c+MjLs8//7zTN1B6sq4zZ86ocePG2r17tySpevXqGjhwoMqWLas9e/Zo8uTJio+PlyR169ZNc+fOzXAfPVmXL/jjcT1z5ozq1atnPf4XGhqqhx56SC1atFDBggV17NgxLVq0SEuXLrXWeeutt/TKK684rW/fvn1q3LixTp48KUlq1KiRHn30UUVGRmrLli2aPHmy9QbUYcOGacyYMS6PmTf5Y/tIUnJysu677z7rsb2oqCgNHDhQMTExSkhI0IIFC7Ro0SKr/PTp0xUbG+u0rrx8Dflr+2SmQoUK1vWV2VeMvHz9SP7ZRk2aNNGqVaskpbbFmDFjrDdqZ6RYsWJOx0SU8nYb+WP7+HNd/qpmzZo6c+aMOnXqpHr16ql48eI6f/68fv/9d82ePVt//vmnJKlp06b67rvvHIYDsMf9P3d4qn0k7v+5wZPtk5lb6f7vkm/zuwAAZN/UqVOt//nM6mf//v25Xpcxxuzdu9fUqlXL5fpdu3bN8JGY3KrL2/z1uO7Zs8eh10JGn8DAQPP6669nup9r1651+F97Z5+nnnoqR73IcoO/to8xxly8eNF06dLFZV2hoaHm3//+d6Z15dVryJ/bx5Xs9GAxJu9eP8b4ZxtlNx5JpkWLFi73M6+2kT+2j7/X5Y/sh7tw9rHZbKZfv37mwoULmdbF/d/zPNk+xnD/9zRPt48rt9L93xUSrACAPMefE6zGGHP16lUzYcIE07x5c1OiRAkTEhJiypUrZx588EHz9ddfZ2tfPVmXN/nzcb1+/bqZM2eO6dmzp6lUqZIpUKCACQoKMkWKFDENGzY0L774otm1a1eW60tISDB///vfTcOGDU3RokVNaGioqVChgundu7dZsWJFluvxJn9unzSLFi0yPXv2NNHR0SY0NNQULFjQ1K5d27z44osmPj4+y/XkxWsoL7SPM9n9gmVM3rx+jPHPNspuPFLmCVZj8mYb+WP75IW6/M2KFSvMiy++aO666y5Trlw5ExoaagoXLmxq1Khhhg4datatW5et+rj/e5an2yfNrXz/96Tcah9nbqX7vysMEQAAAAAAAAAAbgrwdQAAAAAAAAAAkFeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAAAAAAAAN5FgBQAAAAAAAAA3kWAFAAAAAAAAADeRYAUAAACQq6ZNmyabzSabzaZRo0b5OhynWrZsacV44MCBXNlGWv0VKlTIlfr9UWJioipWrCibzaZBgwY5LdOvXz/r2Cxfvty7AXrAb7/9ZsX/zTff+DocAIAPkGAFAAAAAOSKd999VwcOHFD+/Pn9NrmeU40aNVK3bt0kSf/3f/+nq1ev+jgiAIC3kWAFAAAAAHjc8ePHNWbMGEnSwIEDVaZMGR9HlHtGjBghSdqzZ48mTZrk42gAAN5GghUAAAAA4HFvv/22Ll++rMDAQA0bNszX4eSq2rVrq2PHjpKk0aNH6/Llyz6OCADgTSRYAQAAAAAedfr0aU2ZMkWS1KlTJ5UvX97HEeW+J554QpL0559/avr06T6OBgDgTSRYAQAAAAAe9fHHHysxMVGS1L9/fx9H4x2dOnVSyZIlJUnjxo3zcTQAAG8iwQoAAADApV9++UVPPvmkatSooaJFiyo0
NFRly5ZVp06dNGXKFF2/fj3H23D2JvlNmzZp8ODBqlq1qgoUKKCiRYvqrrvu0tixY7P1IqF169bpscceU4UKFRQWFqZSpUqpefPmmjx5sq5du5bj2HPD+fPnNWbMGLVu3VqlS5dWaGioihUrpgYNGmj48OHav39/tur7+eefNWjQIMXExKhgwYLKly+foqOj1b17d33xxRdKSUnxaPzTpk2TJEVERKhDhw4erXvRokV69NFHValSJRUoUEDh4eGqVKmSHn30UX3zzTfZqmv16tWKjY11ODdatGihjz/+2Do3WrZsaZ2bBw4cyLCuwMBAPfjgg5KkHTt2aPXq1W7vIwAgjzEAAAAA4MTp06fNfffdZyS5/FStWtVs3749w3qmTp1qlR05cqTTMn379rXKxMXFmXHjxpmgoKAMt1mtWjWzZ8+eTPdhxIgRJiAgIMN6GjRoYA4fPmxatGhh/Wz//v1uHjHX0uqPjo52WW7RokWmePHiLo95SEiIeeeddzLd5sWLF0337t0zbcPatWubvXv3emQ/N27caNX74IMPZlr+xrbPyIkTJxzaKaNP69atzcmTJzPd7osvvmhsNluG9TRs2NAcOXIkW+fGokWLrLL/93//l2kMAICbQ1COM7QAAAAAbjonT55Us2bNtGvXLklSeHi4OnTooNtvv11hYWE6dOiQvvnmG8XHx2vXrl1q2rSp1q9frwoVKuR42wsXLtT7778vSWrRooWaNWumkJAQbd68WYsWLdLVq1e1c+dOtWrVSuvWrVOJEiWc1vPWW2/pzTfftOarVq2q++67T0WLFtWBAwc0f/58rVu3Tj169FBAgH883Dd//nx1795dycnJkqQSJUqoc+fOio6O1pkzZ/Tdd99p+/btunbtmoYPH66EhAS98847Tuu6du2a2rZtq1WrVlk/a9mypZo2baqQkBBt3bpVCxcu1JUrV7Rp0yY1adJEv/32m6Kjo3O0D999953D9jzh7Nmzatq0qfbs2SNJCgoKUvv27VW/fn1J0vr167VkyRIlJSVp2bJlatasmX777TcVKlTIaX0jRozQ3//+d2u+evXq6tixo4oWLapDhw7p66+/1po1a9SjRw/ZbLYsx3n33XcrMDBQycnJ+vbbb/XBBx/kYK8BAHmGrzO8AAAAAPxP+/btrZ54sbGx5ty5c+nKXLt2zQwbNswqd/fddzutK7s9WG02m8mXL5/55ptv0pXbvn27qVixolW2Z8+eTuvbuHGjQw/Y1157zSQnJzuUOXv2rLn33nutbSqLvRTdlVZ/Rj1Yjx07ZooUKeKwb+fPn3cok5KSYt555x2HY/X99987re/ll1+2ykVERDgtt2fPHlO9evVM2zA7OnToYNW3cuXKTMtnpQdrr169rDJlypQxGzZsSFdm/fr1plSpUla5Pn36OK1rzZo1Dr2aR48ebVJSUhzKXLhwwer5m91zw/54Hj9+PNPyAIC8zz/+mxYAAACA31iyZImWLFkiSerevbumT5/utCdgcHCwxowZo65du0pKHefz559/zvH2jTGaOHGi7r333nTLYmJitGjRIgUHB0uSvvzyS+3cuTNdudGjRyspKUmS1KdPH73++uvpeqkWLlxYc+fOVeXKlWWMyXHcOfXPf/5TZ8+elSTdeeed+vzzzxUREeFQxmaz6eWXX9YzzzwjKfVYjRw5Ml1dZ8+edXjR0meffaY2bdqkK1epUiUtXrxYBQoUkJTaht9//32O9mP9+vXWdM2aNXNUl5Q6nuns2bMlpY5zunDhQtWtWzdduXr16mnBggVWO3/++edWD2x77777rjXm7MCBAzV8+PB0vVTDw8M1c+ZM1axZM9vnRq1atazptWvXZmtdAEDeRIIVAAAAgIMJEyZISk3m/e1vf8u0/LBhw6zpBQsW5Hj7VatWVd++fTNcfvvtt6t3797W/Keffuqw/OLFi5o/f76k1H2wHybgRvny5dNrr72Ws4A9ZPr06db06NGjFRSU
8Yhur7/+usLCwiRJK1eutB6dTzNnzhxdvnxZktS0aVM98MADGdZVvnx5Pf30007jyK6EhASdPHlSklS0aNF0CWJ3fPrpp1aSs1evXqpXr16GZe+880716NFDUmry+cZz48KFC9Y5GhAQoFGjRmVYV3BwsEaMGJHteO2HWNi9e3e21wcA5D0kWAEAAABYUlJStGLFCklS5cqVddttt2W6Tp06dazpdevW5TiGLl26ZFomrdeslJpgtLd27Vpdv35dklS3bt1Mx4Xt0qWLz8dg3b9/v44dOyZJKlKkiFq3bu2yfJEiRdS2bVtr/pdffnFY/uuvv1rT3bt3z3T7PXv2zLCu7Dh8+LA1Xbp0abfrsefJfVm7dq3Vs7l+/foqW7asy7ruu+8+BQYGZidclSpVypqOj4/P1roAgLyJBCsAAAAAy4EDB5SQkCAptfedzWbL9JP2eLkkq/diTtgnbDNyxx13WNM3DhFg/1h4VuqKiIjIUiI5N9n3dKxdu3aWEr72PTlv7ClpP+/scfob1apVy+oxe/DgQStBnV3nz5+3pvPnz+9WHTfK7r5k9bjYP8qfkfz582f73LC/Hi5cuJCtdQEAeRMJVgAAAACW06dP52j9S5cu5TiGyMjITMsUK1bMmk4bt9TZfFbqurE+X7CPOaux2Jc7c+ZMjuoLDg52GGf3xvqyKq13qCSXQxxkR3b3JavHJavnRlbLpbHfb3cT1QCAvMUzdzwAAAAANwX7BFn58uWtlylllbOXYeHWYd9r9cqVKz6MxHcSExOtafverACAmxcJVgAAAAAW+956BQoU0PPPP+/1GLLSi/bUqVPWdJEiRRyW2c9ntUeufX2+kNOYixYtmqP6rl+/bg0N4ay+rCpevLg17W4v2BsVKVLEGp/29OnTmSYtXR2XwoULW9NZPc7Z7dVt30vW/ngAAG5eDBEAAAAAwBIdHa18+fJJkvbs2WO9id6bNm3alGmZzZs3W9PVqlVzWFa1atVs1XXx4kXt27cvGxF6XpUqVazpTZs2yRiT6TobN260pu33+cb67MtlZOvWrVbv5QoVKig4ODjTdZwpW7aste7Ro0eztB+Zye6+ZPW4bNmyJdO6Ll++nO1z48iRI9Z0Zi9YAwDcHEiwAgAAALCEhobq7rvvlpTaq3HevHlej2H+/PmZlvnqq6+s6SZNmjgsu/POO60k34YNG3Tw4MFMt5eSkpL9QD2oYsWKKlOmjKTUnp/Lli1zWT4hIUHff/+9Nd+0aVOH5fbzc+fOzXT7c+bMybCu7AgMDFSNGjUkpZ4/nkhce3Jf7rzzTmuM1PXr1zskQ51ZtGiRkpOTsxOuduzYYU3Xrl07W+sCAPImEqwAAAAAHDz11FPW9CuvvKKTJ09meV1P9FjcuXOnZsyYkeHyHTt26PPPP7fmH330UYfl4eHh6tKlixXPa6+9lmFdV65c0ZtvvpmzgD0kNjbWmn711VddJvZGjRpljfXZtGlTVa5c2WF5jx49rPFQf/nlF33zzTcZ1nX48GF9+OGH1ny/fv3cCd/SqFEjazorPYgzExsbK5vNJkmaNWuWyzo3bNigL7/8UpJks9nUt29fh+UFCxbU/fffL0lKSUnR66+/nmFd169f11tvvZWtWI0xVu/q/Pnzq2bNmtlaHwCQN5FgBQAAAODggQceUNu2bSVJ8fHxatasmVatWpVh+cTERP3nP/9RixYttGHDhhxv32az6YknntDixYvTLdu1a5fuv/9+Xbt2TVJqIjEmJiZduZdfflmBgYGSpE8//VRvvPFGul6qCQkJ6tGjh3bt2mUl8Hxp6NCh1pihq1ev1qOPPqqLFy86lDHG6L333tPYsWMlpR6rUaNGpaurSJEiGjp0qDXfu3dvp71i9+3bpw4dOujChQuSpLvvvltt2rTJ0X60a9fOmv7ll19yVJckxcTEqFevXpJSX8J2//33O02ybty4Uffff7+VmO7du7fDkABpXnrpJQUEpH4V/vjjj/Xu
u++m+4+Bixcvqnfv3tqyZUu2zo1t27ZZY7C2atXK7aEWAAB5Cy+5AgAAAJDOrFmz1Lx5c23btk27du1SkyZNVK9ePTVt2lSlSpVScnKyTp06pS1btmjNmjW6dOmSJM/0YP2///s/ffDBB+rYsaNatWqlZs2aKTg4WJs3b9bChQt19epVSanjfY4fP95pHfXq1dOIESOs5OPIkSP1xRdfqFOnTipatKgOHjyor776SidPnlTjxo0VGBioX3/9Ncex50SpUqU0ZcoUde/eXcnJyfriiy+0bNkyde7cWdHR0Tpz5oy+++47/fHHH9Y6L730UoYJ0VGjRmn58uVatWqVEhIS1KZNG7Vs2VJNmzZVSEiItm7dqoULF1o9YUuWLOmy53BWtWvXTvny5VNiYqJ++OGHHNcnSR999JHWrl2rPXv2KD4+Xg0aNFCHDh1Uv359SamP+y9evNgaR7ZatWoOvXLtNWrUSC+++KLeffddSdLw4cM1Y8YMdezYUUWLFtWhQ4c0f/58nThxQk2aNJHNZrPOjcySrfZJ7AceeCDH+w0AyCMMAAAAADhx4cIFExsbawICAoykTD9ly5Y1e/bsSVfP1KlTrTIjR450uq2+fftaZeLi4szYsWNNUFBQhtuqUqWK2bVrV6b78Ne//tVl/PXq1TOHDx82LVq0sH62f//+HB4559Lqj46Odllu4cKFplixYi6PdUhIiBk9enSm27x48aLp1q1bpm1Xu3Zts3fvXg/tqTG9evWy6t69e7fLsje2fUZOnDhhmjdvnum+tGrVypw8eTLTGIcNG2ZsNluG9TRo0MAcOXLENGvWzPrZiRMnXNaZdh6Fhoaas2fPZhoDAODmwBABAAAAAJwKDw/X9OnT9ccff2j48OFq0qSJSpYsqeDgYIWFhals2bJq1aqVXnrpJS1btkyHDh1SpUqVPLLtoUOH6rffftOAAQNUqVIl5cuXT4ULF1ajRo30j3/8Q5s3b3b6+PeN3n77ba1evVqxsbGKiopSSEiISpQooWbNmunDDz/UypUrVbZsWY/E7Cn33Xef9u7dq/fee08tW7a0jnnRokVVt25dvfTSS9qxY4eGDx+eaV0FChTQ3LlztWLFCg0YMEBVqlRReHi4QkNDFRUVpa5du+rzzz/Xhg0bdNttt3lsHwYPHmxNz5w50yN1lihRQitWrNCCBQv0yCOPqGLFisqfP7/y58+vihUrqnfv3lq4cKGWLVumYsWKZVrfmDFj9Msvv6h3797pzo0JEybol19+UZkyZZSQkGCtU6hQoQzrO3z4sH7++WdJqUNXFC5cOMf7DADIG2zGeOAZHgAAAADIgX79+mn69OmSpLi4OLVs2dK3ASHH6tWrp40bNyoqKkoHDhywxj3NS5KSklSwYEElJiaqVKlSOnbsWIZlX3/9dWtIig0bNqhu3bpeihIA4Gt57w4HAAAAAPB7r732mqTUF6XNmzfPx9G4Z9myZdYYtWnjvTpz5coVTZw4UVJqD2SSqwBwayHBCgAAAADwuC5duqhhw4aSZL1QKi9JTk7WyJEjrfkuXbpkWHbq1Kk6ceKEAgIC9NZbb3khOgCAPyHBCgAAAADIFePGjZPNZtOGDRs0d+5cX4djWbx4scaOHavz5887XX7q1Cn16NFDq1evliSVLFlSvXr1clr28uXLevPNNyVJjz/+uGrXrp07QQMA/FaQrwMAAAAAANycGjVqpClTpujgwYO6fPmyr8OxnDp1Ss8++6z++te/qnnz5rrjjjtUtGhRXbp0Sdu2bdPSpUt16dIlSVJAQID+9a9/KTw83Gld+/bt06BBgyRJzzzzjNf2AQDgP0iwAgAAAIAT8fHxmj17do7qeOihhxQVFeWhiPKmxx57zNchZCgxMVFLlizRkiVLnC4vVKiQPvnkEz3wwAMZ1lGzZk3VrFkzt0IEAOQBJFgBAAAAwIm9e/fqhRdeyFEdDRo0uOUTrP6oW7duCgsL09KlS7V582adOHFC
J0+eVFJSkiIjI3X77berXbt2evzxx1W4cGFfhwsA8HM2Y4zxdRAAAAAA4G+WL1+uVq1a5aiOuLg4tWzZ0jMBAQAAv0SCFQAAAAAAAADcFODrAAAAAAAAAAAgryLBCgAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbiLBCgAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbiLBCgAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbiLBCgAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbiLBCgAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbiLBCgAAAAAAAABuIsEKAAAAAAAAAG4iwQoAAAAAAAAAbvp/dHVnZCXoXiEAAAAASUVORK5CYII=\", \"__metadata__\": {\"image/png\": {\"width\": 684, \"height\": 335}}}"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "cNyH",
+ "code_hash": "ab0265e37728950a86f064ba471f9f31",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ISrK",
+ "code_hash": "ff9b1e2c64a6155b1168dd9f33cfba84",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "dict_keys(['response', 'list_params', 'choices', 'description', 'likelihoods']) "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "xqqY",
+ "code_hash": "318e2b05490a9ca912db794e97b922f7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/json": "{\"analytical\": {\"loglik\": \"text/html:function logp_ddm
Compute analytical likelihood for the DDM model with `sv`.
def logp_ddm(data: numpy.ndarray, v: float, a: float, z: float, t: float, err: float = 1e-15, k_terms: int = 20, epsilon: float = 1e-15) -> numpy.ndarray:
\", \"backend\": null, \"bounds\": {\"v\": [\"text/plain+float:-inf\", \"text/plain+float:inf\"], \"a\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"]}, \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"extra_fields\": null}, \"approx_differentiable\": {\"loglik\": \"ddm.onnx\", \"backend\": \"jax\", \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"bounds\": {\"v\": [\"text/plain+float:-3.0\", \"text/plain+float:3.0\"], \"a\": [\"text/plain+float:0.3\", \"text/plain+float:2.5\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:2.0\"]}, \"extra_fields\": null}, \"blackbox\": {\"loglik\": \"text/html:function outer
def outer(data: numpy.ndarray, *args, **kwargs):
\", \"backend\": null, \"bounds\": {\"v\": [\"text/plain+float:-inf\", \"text/plain+float:inf\"], \"a\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"]}, \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"extra_fields\": null}}"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "Cvjs",
+ "code_hash": "e4cb95b58d3a2ba1322185fbdb3eef97",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/json": "{\"loglik\": \"text/html:function logp_ddm
Compute analytical likelihood for the DDM model with `sv`.
def logp_ddm(data: numpy.ndarray, v: float, a: float, z: float, t: float, err: float = 1e-15, k_terms: int = 20, epsilon: float = 1e-15) -> numpy.ndarray:
\", \"backend\": null, \"bounds\": {\"v\": [\"text/plain+float:-inf\", \"text/plain+float:inf\"], \"a\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:inf\"]}, \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"extra_fields\": null}"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "gYFm",
+ "code_hash": "6741a30ab07cdde0b83f25183911d143",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/json": "{\"loglik\": \"ddm.onnx\", \"backend\": \"jax\", \"default_priors\": {\"t\": {\"name\": \"HalfNormal\", \"sigma\": \"text/plain+float:2.0\"}}, \"bounds\": {\"v\": [\"text/plain+float:-3.0\", \"text/plain+float:3.0\"], \"a\": [\"text/plain+float:0.3\", \"text/plain+float:2.5\"], \"z\": [\"text/plain+float:0.0\", \"text/plain+float:1.0\"], \"t\": [\"text/plain+float:0.0\", \"text/plain+float:2.0\"]}, \"extra_fields\": null}"
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "ZVxg",
+ "code_hash": "97ad532806fa080719e1f6418c0342b3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "RzLA",
+ "code_hash": "48a755a65c405641490d7d8870624547",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "'approx_differentiable' "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "CtVA",
+ "code_hash": "95060699c18374f33bcd1fa541579ae2",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": "\n \n \n
\n \n \n \n posterior \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 72kB\nDimensions: (chain: 2, draw: 1000)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 8kB 0 1 2 3 4 5 6 7 ... 993 994 995 996 997 998 999\nData variables:\n a (chain, draw) float64 16kB 1.418 1.445 1.445 ... 1.395 1.372 1.47\n v (chain, draw) float64 16kB 0.6098 0.6081 0.6081 ... 0.551 0.6293\n t (chain, draw) float64 16kB 0.5252 0.5528 0.5528 ... 0.5774 0.5437\n z (chain, draw) float64 16kB 0.4547 0.4463 0.4463 ... 0.4674 0.4626\nAttributes:\n created_at: 2026-01-08T04:20:31.918047+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 104.0833387374878\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (4)
a
(chain, draw)
float64
1.418 1.445 1.445 ... 1.372 1.47
array([[1.41832234, 1.44482895, 1.44482895, ..., 1.45080145, 1.42270343,\n 1.429684 ],\n [1.43845082, 1.4091634 , 1.42214607, ..., 1.39488567, 1.37237101,\n 1.46972711]], shape=(2, 1000)) v
(chain, draw)
float64
0.6098 0.6081 ... 0.551 0.6293
array([[0.60983178, 0.60812628, 0.60812628, ..., 0.6169344 , 0.60076136,\n 0.59421429],\n [0.59519145, 0.54608138, 0.62611476, ..., 0.57601093, 0.55095015,\n 0.62927693]], shape=(2, 1000)) t
(chain, draw)
float64
0.5252 0.5528 ... 0.5774 0.5437
array([[0.52519195, 0.55281282, 0.55281282, ..., 0.49019437, 0.552287 ,\n 0.53569464],\n [0.53855816, 0.54366838, 0.53073461, ..., 0.56866116, 0.5774497 ,\n 0.54366023]], shape=(2, 1000)) z
(chain, draw)
float64
0.4547 0.4463 ... 0.4674 0.4626
array([[0.45468805, 0.4462848 , 0.4462848 , ..., 0.4246702 , 0.45489589,\n 0.45722831],\n [0.45205031, 0.46443635, 0.44011694, ..., 0.45442414, 0.46739781,\n 0.46262934]], shape=(2, 1000)) Attributes: (8)
created_at : 2026-01-08T04:20:31.918047+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 104.0833387374878 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n sample_stats \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 268kB\nDimensions: (chain: 2, draw: 1000)\nCoordinates:\n * chain (chain) int64 16B 0 1\n * draw (draw) int64 8kB 0 1 2 3 4 5 ... 995 996 997 998 999\nData variables: (12/18)\n divergences (chain, draw) int64 16kB 0 0 0 0 0 0 ... 0 0 0 0 0 0\n step_size_bar (chain, draw) float64 16kB 0.6537 0.6537 ... 0.6519\n smallest_eigval (chain, draw) float64 16kB nan nan nan ... nan nan\n acceptance_rate (chain, draw) float64 16kB 0.9808 0.9816 ... 0.6121\n perf_counter_diff (chain, draw) float64 16kB 0.01429 ... 0.02977\n n_steps (chain, draw) float64 16kB 3.0 3.0 3.0 ... 3.0 7.0\n ... ...\n diverging (chain, draw) bool 2kB False False ... False False\n energy_error (chain, draw) float64 16kB -0.3712 0.05687 ... 0.3708\n largest_eigval (chain, draw) float64 16kB nan nan nan ... nan nan\n tree_depth (chain, draw) int64 16kB 2 2 2 3 3 3 ... 3 2 2 2 2 3\n max_energy_error (chain, draw) float64 16kB -0.3712 -0.3294 ... 0.9064\n energy (chain, draw) float64 16kB 994.6 992.6 ... 997.9\nAttributes:\n created_at: 2026-01-08T04:20:31.942338+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n sampling_time: 104.0833387374878\n tuning_steps: 1000\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions:
Coordinates: (2)
Data variables: (18)
divergences
(chain, draw)
int64
0 0 0 0 0 0 0 0 ... 0 0 0 0 0 0 0 0
array([[0, 0, 0, ..., 0, 0, 0],\n [0, 0, 0, ..., 0, 0, 0]], shape=(2, 1000)) step_size_bar
(chain, draw)
float64
0.6537 0.6537 ... 0.6519 0.6519
array([[0.65367279, 0.65367279, 0.65367279, ..., 0.65367279, 0.65367279,\n 0.65367279],\n [0.65191933, 0.65191933, 0.65191933, ..., 0.65191933, 0.65191933,\n 0.65191933]], shape=(2, 1000)) smallest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, ..., nan, nan, nan],\n [nan, nan, nan, ..., nan, nan, nan]], shape=(2, 1000)) acceptance_rate
(chain, draw)
float64
0.9808 0.9816 ... 0.9732 0.6121
array([[0.98081552, 0.98157116, 0.19925539, ..., 0.90362236, 0.84772244,\n 0.92964332],\n [0.80628372, 0.92010318, 0.88047044, ..., 0.9851636 , 0.97324921,\n 0.61209282]], shape=(2, 1000)) perf_counter_diff
(chain, draw)
float64
0.01429 0.01541 ... 0.01537 0.02977
array([[0.01429147, 0.01540693, 0.01416676, ..., 0.02790864, 0.02789033,\n 0.0275192 ],\n [0.02492774, 0.01223654, 0.02583136, ..., 0.01417914, 0.01536506,\n 0.02977307]], shape=(2, 1000)) n_steps
(chain, draw)
float64
3.0 3.0 3.0 7.0 ... 3.0 3.0 3.0 7.0
array([[3., 3., 3., ..., 7., 7., 7.],\n [7., 3., 7., ..., 3., 3., 7.]], shape=(2, 1000)) process_time_diff
(chain, draw)
float64
0.05243 0.05412 ... 0.05388 0.1039
array([[0.05242715, 0.05411698, 0.0517771 , ..., 0.10116927, 0.10085791,\n 0.09961226],\n [0.09369906, 0.04642883, 0.09405901, ..., 0.05197126, 0.05388416,\n 0.10394045]], shape=(2, 1000)) lp
(chain, draw)
float64
-992.0 -992.2 ... -993.6 -993.4
array([[-991.95133557, -992.21468616, -992.21468616, ..., -991.95401749,\n -991.75718205, -991.43078329],\n [-991.31137482, -991.94606888, -991.44895709, ..., -992.42524695,\n -993.56600872, -993.43725829]], shape=(2, 1000)) perf_counter_start
(chain, draw)
float64
277.8 277.8 277.8 ... 355.8 355.9
array([[277.75593712, 277.77049001, 277.78612287, ..., 301.28593947,\n 301.3140806 , 301.342201 ],\n [328.42188222, 328.44699881, 328.45943412, ..., 355.83499288,\n 355.84937534, 355.86492939]], shape=(2, 1000)) index_in_trajectory
(chain, draw)
int64
1 2 0 2 2 -2 2 ... 2 -2 -2 -1 1 -2
array([[ 1, 2, 0, ..., -5, 2, 2],\n [ 2, 2, -4, ..., -1, 1, -2]], shape=(2, 1000)) reached_max_treedepth
(chain, draw)
bool
False False False ... False False
array([[False, False, False, ..., False, False, False],\n [False, False, False, ..., False, False, False]], shape=(2, 1000)) step_size
(chain, draw)
float64
0.8472 0.8472 ... 0.4976 0.4976
array([[0.84715429, 0.84715429, 0.84715429, ..., 0.84715429, 0.84715429,\n 0.84715429],\n [0.497558 , 0.497558 , 0.497558 , ..., 0.497558 , 0.497558 ,\n 0.497558 ]], shape=(2, 1000)) diverging
(chain, draw)
bool
False False False ... False False
array([[False, False, False, ..., False, False, False],\n [False, False, False, ..., False, False, False]], shape=(2, 1000)) energy_error
(chain, draw)
float64
-0.3712 0.05687 ... 0.01096 0.3708
array([[-0.37123466, 0.0568736 , 0. , ..., 0.06343429,\n -0.09844004, -0.01735771],\n [-0.11387191, 0.00135024, 0.04394423, ..., -0.22784025,\n 0.01095564, 0.37083384]], shape=(2, 1000)) largest_eigval
(chain, draw)
float64
nan nan nan nan ... nan nan nan nan
array([[nan, nan, nan, ..., nan, nan, nan],\n [nan, nan, nan, ..., nan, nan, nan]], shape=(2, 1000)) tree_depth
(chain, draw)
int64
2 2 2 3 3 3 3 3 ... 2 2 3 2 2 2 2 3
array([[2, 2, 2, ..., 3, 3, 3],\n [3, 2, 3, ..., 2, 2, 3]], shape=(2, 1000)) max_energy_error
(chain, draw)
float64
-0.3712 -0.3294 ... -0.1054 0.9064
array([[-0.37123466, -0.32938252, 2.5514354 , ..., 0.18184042,\n 0.44771754, 0.18840013],\n [ 0.39841025, 0.15040319, 0.19784654, ..., -0.22784025,\n -0.10539883, 0.90635435]], shape=(2, 1000)) energy
(chain, draw)
float64
994.6 992.6 995.4 ... 994.3 997.9
array([[994.58915505, 992.6142443 , 995.40156256, ..., 993.10443917,\n 993.57978184, 992.28744494],\n [993.81549312, 992.29992582, 993.26118399, ..., 992.84829217,\n 994.2576829 , 997.89621551]], shape=(2, 1000)) Attributes: (8)
created_at : 2026-01-08T04:20:31.942338+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 sampling_time : 104.0833387374878 tuning_steps : 1000 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n \n observed_data \n
\n \n
\n \n\n\n \n \n \n \n\n \n \n \n \n \n \n \n
<xarray.Dataset> Size: 12kB\nDimensions: (__obs__: 500, rt,response_extra_dim_0: 2)\nCoordinates:\n * __obs__ (__obs__) int64 4kB 0 1 2 3 4 ... 496 497 498 499\n * rt,response_extra_dim_0 (rt,response_extra_dim_0) int64 16B 0 1\nData variables:\n rt,response (__obs__, rt,response_extra_dim_0) float64 8kB 6...\nAttributes:\n created_at: 2026-01-08T04:20:31.950959+00:00\n arviz_version: 0.22.0\n inference_library: pymc\n inference_library_version: 5.26.1\n modeling_interface: bambi\n modeling_interface_version: 0.15.0 Dimensions: __obs__ : 500rt,response_extra_dim_0 : 2
Coordinates: (2)
Data variables: (1)
rt,response
(__obs__, rt,response_extra_dim_0)
float64
6.933 1.0 1.175 ... 1.0 2.074 1.0
array([[ 6.93325281, 1. ],\n [ 1.17452705, 1. ],\n [ 0.86508274, 1. ],\n [ 4.09761763, 1. ],\n [ 1.27639914, 1. ],\n [ 2.80015469, -1. ],\n [ 1.52351105, 1. ],\n [ 2.97225881, -1. ],\n [ 3.21463704, 1. ],\n [ 1.68360186, 1. ],\n [ 2.82362413, 1. ],\n [ 1.06172681, 1. ],\n [ 4.06599569, 1. ],\n [ 3.33891726, 1. ],\n [ 1.29709721, -1. ],\n [ 2.98844099, 1. ],\n [ 1.28441131, 1. ],\n [ 2.38029313, 1. ],\n [ 2.56387377, 1. ],\n [ 1.64253199, 1. ],\n...\n [ 1.92144608, 1. ],\n [ 3.65872908, 1. ],\n [ 2.64628577, 1. ],\n [ 1.29509044, 1. ],\n [ 1.81381035, 1. ],\n [ 0.77135766, 1. ],\n [ 8.69867706, 1. ],\n [ 0.99507195, 1. ],\n [ 2.61183381, 1. ],\n [ 7.32697344, 1. ],\n [ 1.16251194, -1. ],\n [ 6.64661074, 1. ],\n [ 2.0273869 , 1. ],\n [ 2.99489498, 1. ],\n [ 1.20595527, 1. ],\n [ 1.21577442, 1. ],\n [ 1.22084749, 1. ],\n [ 1.76109827, 1. ],\n [ 1.9839462 , 1. ],\n [ 2.07443333, 1. ]]) Attributes: (6)
created_at : 2026-01-08T04:20:31.950959+00:00 arviz_version : 0.22.0 inference_library : pymc inference_library_version : 5.26.1 modeling_interface : bambi modeling_interface_version : 0.15.0 \n \n
\n \n \n \n
\n "
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using adapt_diag...\nSequential sampling (2 chains in 1 job)\nNUTS: [a, t, z, v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.847 7 40.34 draws/s 0:00:49 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.498 7 19.21 draws/s 0:01:44 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 2 chains for 1_000 tune and 1_000 draw iterations (2_000 + 2_000 draws total) took 104 seconds.\nWe recommend running at least 4 chains for robust computation of convergence diagnostics\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "TSoP",
+ "code_hash": "fa8fc962dec8120abf91979b8d8944f3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "QuIQ",
+ "code_hash": "b1430b8a86046b5a3dc419fd95f41e50",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "BxLv",
+ "code_hash": "23e22dcb7a5385e2f70b0066b489a949",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "SStq",
+ "code_hash": "712415a747c3eba268c85268ef79619c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Model initialized successfully.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "LNTb",
+ "code_hash": "860d5e2005534a06b440656c6fe5f691",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "application/vnd.marimo+mimebundle": {}
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "dGVZ",
+ "code_hash": "81cc53b9319916aea15084142ac167e4",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": "Using default initvals. \n\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Multiprocess sampling (4 chains in 4 jobs)\nCompoundStep\n>Slice: [a]\n>Slice: [t]\n>Slice: [z]\n>Slice: [v]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Tuning Steps out Steps in Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 False 0 0 16.90 draws/s 0:01:58 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 False 0 2 16.74 draws/s 0:01:59 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 False 0 0 16.65 draws/s 0:02:00 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 False 0 1 16.98 draws/s 0:01:57 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 4 chains for 1_000 tune and 1_000 draw iterations (4_000 + 4_000 draws total) took 120 seconds.\n\r 0%| | 0/4000 [00:00, ?it/s]\r 1%|\u2588\u258c | 50/4000 [00:00<00:07, 498.40it/s]\r 3%|\u2588\u2588\u2588\u258c | 113/4000 [00:00<00:06, 573.18it/s]\r 4%|\u2588\u2588\u2588\u2588\u2588\u258b | 179/4000 [00:00<00:06, 608.13it/s]\r 6%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 243/4000 [00:00<00:06, 620.57it/s]\r 8%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 306/4000 [00:00<00:05, 619.36it/s]\r 9%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 372/4000 [00:00<00:05, 632.23it/s]\r 11%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 436/4000 [00:00<00:05, 634.10it/s]\r 13%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 501/4000 [00:00<00:05, 638.24it/s]\r 14%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 565/4000 [00:00<00:05, 637.69it/s]\r 16%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 629/4000 [00:01<00:05, 633.82it/s]\r 17%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 693/4000 [00:01<00:05, 635.69it/s]\r 19%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 761/4000 [00:01<00:04, 648.93it/s]\r 21%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 829/4000 [00:01<00:04, 655.56it/s]\r 
22%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 896/4000 [00:01<00:04, 659.67it/s]\r 24%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 962/4000 [00:01<00:04, 650.18it/s]\r 26%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1028/4000 [00:01<00:04, 652.34it/s]\r 27%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1094/4000 [00:01<00:04, 650.68it/s]\r 29%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1160/4000 [00:01<00:04, 637.80it/s]\r 31%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1224/4000 [00:01<00:04, 633.16it/s]\r 32%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1288/4000 [00:02<00:04, 630.18it/s]\r 
34%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1353/4000 [00:02<00:04, 635.63it/s]\r 36%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 1420/4000 [00:02<00:04, 642.85it/s]\r 37%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1487/4000 [00:02<00:03, 649.67it/s]\r 39%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1557/4000 [00:02<00:03, 663.28it/s]\r 41%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 1630/4000 [00:02<00:03, 680.74it/s]\r 42%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 1699/4000 
[00:02<00:03, 680.59it/s]\r 44%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 1768/4000 [00:02<00:03, 677.41it/s]\r 46%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 1836/4000 [00:02<00:03, 672.44it/s]\r 48%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 1904/4000 [00:02<00:03, 674.28it/s]\r 49%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 1974/4000 [00:03<00:02, 680.90it/s]\r 
51%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 2047/4000 [00:03<00:02, 691.56it/s]\r 53%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 2117/4000 [00:03<00:02, 677.43it/s]\r 55%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 2185/4000 [00:03<00:02, 676.07it/s]\r 56%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 2254/4000 [00:03<00:02, 679.66it/s]\r 
58%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 2323/4000 [00:03<00:02, 682.69it/s]\r 60%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 2395/4000 [00:03<00:02, 691.36it/s]\r 62%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 2465/4000 [00:03<00:02, 615.43it/s]\r 63%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e 
| 2529/4000 [00:03<00:02, 594.22it/s]\r 65%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 2590/4000 [00:04<00:02, 584.53it/s]\r 66%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 2650/4000 [00:04<00:02, 586.16it/s]\r 68%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 2710/4000 [00:04<00:02, 588.35it/s]\r 
69%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 2773/4000 [00:04<00:02, 598.73it/s]\r 71%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 2842/4000 [00:04<00:01, 623.57it/s]\r 73%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 2911/4000 [00:04<00:01, 640.51it/s]\r 
75%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 2983/4000 [00:04<00:01, 661.58it/s]\r 76%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 3050/4000 [00:04<00:01, 572.10it/s]\r 78%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 3114/4000 [00:04<00:01, 589.21it/s]\r 
80%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 3182/4000 [00:04<00:01, 613.45it/s]\r 81%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258e | 3252/4000 [00:05<00:01, 636.83it/s]\r 83%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 3325/4000 [00:05<00:01, 
661.48it/s]\r 85%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 3393/4000 [00:05<00:01, 543.16it/s]\r 86%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 3452/4000 [00:05<00:01, 457.79it/s]\r 
88%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258f | 3503/4000 [00:05<00:01, 457.37it/s]\r 89%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258a | 3555/4000 [00:05<00:00, 471.99it/s]\r 
90%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 3610/4000 [00:05<00:00, 490.03it/s]\r 92%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258d | 3666/4000 [00:05<00:00, 508.42it/s]\r 
93%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 3719/4000 [00:06<00:00, 452.71it/s]\r 94%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258c | 3767/4000 [00:06<00:00, 455.03it/s]\r 
96%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u258b | 3834/4000 [00:06<00:00, 512.14it/s]\r 98%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2589 | 3902/4000 [00:06<00:00, 557.46it/s]\r 
99%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588 | 3972/4000 [00:06<00:00, 597.25it/s]\r100%|\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588\u2588| 4000/4000 [00:06<00:00, 609.60it/s]\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "cPpt",
+ "code_hash": "59e31295085f0dfc97f9594faef23010",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "hBDP",
+ "code_hash": "4bd4c0468e674450d1d780ffb724ced7",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "mVhx",
+ "code_hash": "c2ec6f089df4bb41f4448281a53cf42b",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "UJiw",
+ "code_hash": "4b70089f08f1c57165eda8e03fdffc01",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "zmDD",
+ "code_hash": "815a39cd1465813031c6232f5503c1dd",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "oDJM",
+ "code_hash": "8e7808a160e39289dcd2a8ae98802c7d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "oPFs",
+ "code_hash": "6dad5f821a610dafe93694ee8bbc83a9",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Initializing NUTS using jitter+adapt_diag...\nMultiprocess sampling (4 chains in 4 jobs)\nNUTS: [v, a, z, t]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stdout",
+ "text": " \n Progress Draws Divergences Step size Grad evals Sampling Speed Elapsed Remaining \n \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.732 7 24.56 draws/s 0:01:21 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.668 7 23.81 draws/s 0:01:23 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.585 7 24.12 draws/s 0:01:22 0:00:00 \n \u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501\u2501 2000 0 0.505 7 24.74 
draws/s 0:01:20 0:00:00 \n \n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Sampling 4 chains for 1_000 tune and 1_000 draw iterations (4_000 + 4_000 draws total) took 84 seconds.\n",
+ "mimetype": "text/plain"
+ }
+ ]
+ },
+ {
+ "id": "HGJL",
+ "code_hash": "34accc155f0c7e6feea9e08ec79fbc0d",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IRGU",
+ "code_hash": "b553ce13f8fc9590c0a21feedcae1eb3",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/html": " "
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "MepM",
+ "code_hash": "3eea098532aedceb7b1e79f0a52d734c",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "BVRn",
+ "code_hash": "f75583fcae399d3325ea65f6576a2005",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "QzPM",
+ "code_hash": "c132a41b44bc4882cd9570335bf4d5d3",
+ "outputs": [
+ {
+ "type": "error",
+ "ename": "exception",
+ "evalue": "vmap got inconsistent sizes for array axes to be mapped:\n * one axis had size 1000: axis 0 of argument inputs of type float32[1000,2];\n * one axis had size 1: axis 0 of args[1] of type float32[1]",
+ "traceback": []
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Traceback (most recent call last): \n File "/tmp/marimo_7529/__marimo__cell_QzPM_.py" , line 2 , in <module> \n idata_object = pm . sample ( nuts_sampler = "numpyro" ) \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/mcmc.py" , line 802 , in sample \n return _sample_external_nuts ( \n ^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/mcmc.py" , line 391 , in _sample_external_nuts \n idata = pymc_jax . sample_jax_nuts ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/jax.py" , line 642 , in sample_jax_nuts \n initial_points = _get_batched_jittered_initial_points ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/jax.py" , line 236 , in _get_batched_jittered_initial_points \n initial_points = _init_jitter ( \n ^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/mcmc.py" , line 1475 , in _init_jitter \n point_logp = model_logp_fn ( point ) \n ^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/jax.py" , line 234 , in eval_logp_initial_point \n return logp_fn ([ jax . numpy . asarray ( v ) for v in point . 
values ()]) \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/jax.py" , line 143 , in logp_fn_wrap \n return logp_fn ( * x )[ 0 ] \n ^^^^^^^^^^^ \n File "/tmp/tmp6e9daamz" , line 87 , in jax_funcified_fgraph \n tensor_variable_42 = logp ( ANGLE , tensor_variable_41 , tensor_variable_35 , tensor_variable_29 , tensor_variable_23 , tensor_variable_20 ) \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \nValueError : vmap got inconsistent sizes for array axes to be mapped: \n * one axis had size 1000: axis 0 of argument inputs of type float32[1000,2]; \n * one axis had size 1: axis 0 of args[1] of type float32[1] \n-------------------- \nFor simplicity, JAX has removed its internal frames from the traceback of the following exception. Set JAX_TRACEBACK_FILTERING=off to include these. \n \n ",
+ "mimetype": "application/vnd.marimo+traceback"
+ }
+ ]
+ },
+ {
+ "id": "rfYh",
+ "code_hash": "0e731d2b61076ff7c0f6e562d7460532",
+ "outputs": [
+ {
+ "type": "error",
+ "ename": "exception",
+ "evalue": "An ancestor raised an exception (ValueError): ",
+ "traceback": []
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "YPtP",
+ "code_hash": "980e8f65848724704dd7efe185560f10",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "aBxI",
+ "code_hash": "0ad37bc67c000eb1752c28e1d314554f",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "IEQW",
+ "code_hash": "900efb663af6cdd5fa92c58b4dd29ae2",
+ "outputs": [
+ {
+ "type": "data",
+ "data": {
+ "text/plain": ""
+ }
+ }
+ ],
+ "console": []
+ },
+ {
+ "id": "fmbT",
+ "code_hash": "8dc2207b5b3833df0dd375fd6bfbc841",
+ "outputs": [
+ {
+ "type": "error",
+ "ename": "interruption",
+ "evalue": "This cell was interrupted and needs to be re-run",
+ "traceback": []
+ }
+ ],
+ "console": [
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "\r 0%| | 0/2000 [00:00, ?it/s]\r 0%| | 0/2000 [00:02, ?it/s]\n",
+ "mimetype": "text/plain"
+ },
+ {
+ "type": "stream",
+ "name": "stderr",
+ "text": "Traceback (most recent call last): \n File "/tmp/marimo_7529/__marimo__cell_fmbT_.py" , line 2 , in <module> \n idata_pymc_reg = pm . sample ( \n ^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/mcmc.py" , line 802 , in sample \n return _sample_external_nuts ( \n ^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/mcmc.py" , line 391 , in _sample_external_nuts \n idata = pymc_jax . sample_jax_nuts ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/jax.py" , line 652 , in sample_jax_nuts \n raw_mcmc_samples , sample_stats , library = sampler_fn ( \n ^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/pymc/sampling/jax.py" , line 489 , in _sample_numpyro_nuts \n pmap_numpyro . run ( \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/numpyro/infer/mcmc.py" , line 706 , in run \n states , last_state = _laxmap ( partial_map_fn , map_args ) \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/numpyro/infer/mcmc.py" , line 177 , in _laxmap \n ys . 
append ( f ( x )) \n ^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/numpyro/infer/mcmc.py" , line 489 , in _single_chain_mcmc \n collect_vals = fori_collect ( \n ^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/numpyro/util.py" , line 399 , in fori_collect \n vals = _body_fn ( i , * vals ) \n ^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/traceback_util.py" , line 180 , in reraise_with_filtered_traceback \n return fun ( * args , ** kwargs ) \n ^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/pjit.py" , line 263 , in cache_miss \n executable , pgle_profiler , const_args ) = _python_pjit_helper ( \n ^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/pjit.py" , line 146 , in _python_pjit_helper \n out_flat , compiled , profiler , const_args = _pjit_call_impl_python ( \n ^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/pjit.py" , line 1600 , in _pjit_call_impl_python \n compiled = computation . compile () \n ^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/interpreters/pxla.py" , line 2527 , in compile \n executable = UnloadedMeshExecutable . from_hlo ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/interpreters/pxla.py" , line 3073 , in from_hlo \n xla_executable = _cached_compilation ( \n ^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/interpreters/pxla.py" , line 2854 , in _cached_compilation \n xla_executable = compiler . 
compile_or_get_cached ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/compiler.py" , line 491 , in compile_or_get_cached \n return _compile_and_write_cache ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/compiler.py" , line 759 , in _compile_and_write_cache \n executable = backend_compile_and_load ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/profiler.py" , line 359 , in wrapper \n return func ( * args , ** kwargs ) \n ^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/jax/_src/compiler.py" , line 375 , in backend_compile_and_load \n return backend . compile_and_load ( \n ^^^^^^^^^^^^^^^^^^^^^^^^^ \n File "/home/jovan/Documents/projects/hssm_wksp/HSSM/.venv/lib/python3.12/site-packages/marimo/_runtime/handlers.py" , line 48 , in interrupt_handler \n raise MarimoInterrupt \nKeyboardInterrupt \n \n ",
+ "mimetype": "application/vnd.marimo+traceback"
+ }
+ ]
+ },
+ {
+ "id": "DVld",
+ "code_hash": "1380fe1f061d91f97d86b4f7152cf519",
+ "outputs": [
+ {
+ "type": "error",
+ "ename": "exception",
+ "evalue": "An ancestor raised an exception (KeyboardInterrupt): ",
+ "traceback": []
+ }
+ ],
+ "console": []
+ }
+ ]
+}
\ No newline at end of file
diff --git a/docs/tutorials/marimo_tutorial.py b/docs/tutorials/marimo_tutorial.py
new file mode 100644
index 000000000..11d52c6f2
--- /dev/null
+++ b/docs/tutorials/marimo_tutorial.py
@@ -0,0 +1,2912 @@
+import marimo
+
+__generated_with = "0.18.3"
+app = marimo.App(width="medium")
+
+
+@app.cell
+def _():
+ import marimo as mo
+ mo.notebook_dir()
+ return (mo,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ This tutorial provides a comprehensive introduction to the HSSM package for Hierarchical Bayesian Estimation of Sequential Sampling Models.
+
+ To make the most of the tutorial, let us cover the functionality of the key supporting packages that we use along the way.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Colab Instructions
+
+ If you would like to run this tutorial on Google colab, please click this [link](https://github.com/lnccbrown/HSSM/blob/main/docs/tutorials/main_tutorial.ipynb).
+
+ Once you are *in the colab*, follow the *installation instructions below* and then **restart your runtime**.
+
+ Just **uncomment the code in the next code cell** and run it!
+
+ **NOTE**:
+
+ You may want to *switch your runtime* to have a GPU or TPU. To do so, go to *Runtime* > *Change runtime type* and select the desired hardware accelerator.
+
+ Note that if you switch your runtime you have to follow the installation instructions again.
+ """)
+ return
+
+
+@app.cell
+def _():
+ # If running this on Colab, please uncomment the next line
+ # !pip install git+https://github.com/lnccbrown/HSSM@workshop_tutorial
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Basic Imports
+ """)
+ return
+
+
+@app.cell
+def _():
+ import warnings
+ warnings.filterwarnings("ignore")
+
+ # Basics
+ import numpy as np
+ from matplotlib import pyplot as plt
+
+ random_seed_sim = 134
+ np.random.seed(random_seed_sim)
+ return np, plt
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Data Simulation
+
+ We will rely on the [ssms](https://github.com/AlexanderFengler/ssm-simulators) package for data simulation repeatedly. Let's look at a basic isolated use case below.
+
+ As an example, let's use [ssms](https://github.com/AlexanderFengler/ssm-simulators) to simulate from the basic [Drift Diffusion Model](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2474742/) (a running example in this tutorial).
+
+
+
+ If you are not familiar with the DDM, for now just consider that it has four parameters.
+
+ - `v` the drift rate
+ - `a` the boundary separation
+ - `t` the non-decision time
+ - `z` the a priori decision bias (starting point)
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Using `simulate_data()`
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ HSSM comes with a basic simulator function supplied via the `simulate_data()` function. We can use this function to create synthetic datasets.
+
+ Below we show the most basic use case:
+
+ We wish to generate `500` datapoints (trials) from the standard [Drift Diffusion Model](https://www.ncbi.nlm.nih.gov/pmc/articles/PMC2474742/) with fixed parameters, `v = 0.5, a = 1.5, z = 0.5, t = 0.5`.
+
+
+ **Note**:
+
+ In the course of the tutorial, we will see multiple strategies for synthetic dataset generation, this being the most straightforward one.
+ """)
+ return
+
+
+@app.cell
+def _():
+ # Single dataset
+ import arviz as az # Visualization
+ import bambi as bmb # Model construction
+ import hddm_wfpt
+ import jax
+ import pytensor # Graph-based tensor library
+
+ import hssm
+
+ # pytensor.config.floatX = "float32"
+ # jax.config.update("jax_enable_x64", False)
+
+ v_true = 0.5
+ a_true = 1.5
+ z_true = 0.5
+ t_true = 0.5
+
+ # Call the simulator function
+ dataset_simulated = hssm.simulate_data(
+ model="ddm", theta=dict(v=v_true, a=a_true, z=z_true, t=t_true), size=500
+ )
+
+ dataset_simulated
+ return (
+ a_true,
+ az,
+ bmb,
+ dataset_simulated,
+ hddm_wfpt,
+ hssm,
+ jax,
+ t_true,
+ v_true,
+ z_true,
+ )
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ If instead you wish to supply a parameter that *varies by trial* (a lot more on this later), you can simply supply a vector of parameters to the `theta` dictionary, when calling the simulator.
+
+ **Note**:
+
+ The `size` argument conceptually functions as *number of synthetic datasets*. So if you supply a parameter as a `(1000,)` vector, then the simulator assumes that one dataset consists of `1000` trials, hence if we set the `size = 1` as below, we expect in return a dataset with `1000` trials.
+ """)
+ return
+
+
+@app.cell
+def _(hssm, np, t_true, v_true, z_true):
+ # a changes trial wise
+ a_trialwise = np.random.normal(loc=2, scale=0.3, size=1000)
+
+ dataset_a_trialwise = hssm.simulate_data(
+ model="ddm",
+ theta=dict(
+ v=v_true,
+ a=a_trialwise,
+ z=z_true,
+ t=t_true,
+ ),
+ size=1,
+ )
+
+ dataset_a_trialwise
+ return (a_trialwise,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ If we wish to simulate from another model, we can do so by changing the `model` string.
+
+ The number of models we can simulate differs from the number of models for which we have likelihoods available (both will increase over time). To get the models for which likelihood functions are supplied out of the box, we should inspect `hssm.HSSM.supported_models`.
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ hssm.HSSM.supported_models
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ If we wish to check more detailed information about a given supported model, we can use the accessor `get_default_model_config` under `hssm.modelconfig`. For example, we inspect `ddm` model configuration below.
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ hssm.modelconfig.get_default_model_config("ddm")
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ This dictionary contains quite a bit of information. For purposes of *simulating data from a given model*, we will highlight two aspects:
+
+ 1. The key `list_of_params` provides us with the necessary information to define our `theta` dictionary
+ 2. The `bounds` key inside the `likelihoods` sub-dictionaries, provides us with an indication of reasonable parameter values.
+
+ The `likelihoods` dictionary contains three sub-dictionaries for the `ddm` model, since we have all three — an `analytical`, an `approx_differentiable` (LAN) and a `blackbox` likelihood — available. For many models, we will be able to access only one or two types of likelihoods.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Using `ssm-simulators`
+
+ Internally, HSSM natively makes use of the [ssm-simulators](https://github.com/AlexanderFengler/ssm-simulators) package for forward simulation of models.
+ `hssm.simulate_data()` functions essentially as a convenience-wrapper.
+
+ Below we illustrate how to simulate data using the `ssm-simulators` package directly, to generate an equivalent dataset as created above. We will use the *third* way of passing parameters to the simulator, which is as a parameter-*matrix*.
+
+ **Notes**:
+
+ 1. If you pass parameters as a parameter matrix, make sure the column ordering is correct. You can follow the parameter ordering under `hssm.defaults.default_model_config['ddm']['list_params']`.
+
+ 2. This is a minimal example, for more information about the package, check the associated [github-page](https://github.com/AlexanderFengler/ssm-simulators).
+ """)
+ return
+
+
+@app.cell
+def _(a_trialwise, np, t_true, v_true, z_true):
+ import pandas as pd
+ from ssms.basic_simulators.simulator import simulator
+
+ # a changes trial wise
+ theta_mat = np.zeros((1000, 4))
+ theta_mat[:, 0] = v_true # v
+ theta_mat[:, 1] = a_trialwise # a
+ theta_mat[:, 2] = z_true # z
+ theta_mat[:, 3] = t_true # t
+
+ # simulate data
+ sim_out_trialwise = simulator(
+ theta=theta_mat, # parameter_matrix
+ model="ddm", # specify model (many are included in ssms)
+ n_samples=1, # number of samples for each set of parameters
+ # (plays the role of `size` parameter in `hssm.simulate_data`)
+ )
+
+ # Turn into nice dataset
+ dataset_trialwise = pd.DataFrame(
+ np.column_stack(
+ [sim_out_trialwise["rts"][:, 0], sim_out_trialwise["choices"][:, 0]]
+ ),
+ columns=["rt", "response"],
+ )
+
+ dataset_trialwise
+ return (pd,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We will stick to `hssm.simulate_data()` in this tutorial, to keep things simple.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## ArviZ for Plotting
+
+
+
+
+ We use the [ArviZ](https://python.arviz.org/en/stable/) package for most of our plotting needs.
+ ArviZ is a useful aid for plotting when doing anything Bayesian.
+
+ It works with HSSM out of the box, by virtue of HSSMs reliance on [PyMC](https://www.pymc.io/welcome.html) for model construction and sampling.
+
+ Checking out the [ArviZ Documentation](https://python.arviz.org/en/stable/getting_started/index.html) is a good idea to give you communication superpowers for not only your HSSM results, but also other libraries in the Bayesian Toolkit such as [NumPyro](https://num.pyro.ai/en/latest/index.html#introductory-tutorials) or [STAN](https://mc-stan.org/users/documentation/).
+
+ We will see [ArviZ](https://python.arviz.org/en/stable/) plots throughout the notebook.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ # Main Tutorial
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Initial Dataset
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's proceed to simulate a simple dataset for our first example.
+ """)
+ return
+
+
+@app.cell
+def _(a_true, hssm, t_true, v_true, z_true):
+ # Specify
+ param_dict_init = dict(
+ v=v_true,
+ a=a_true,
+ z=z_true,
+ t=t_true,
+ )
+
+
+ dataset_simulated_main = hssm.simulate_data(
+ model="ddm",
+ theta=param_dict_init,
+ size=500,
+ )
+
+ dataset_simulated_main
+ return dataset_simulated_main, param_dict_init
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## First HSSM Model
+
+ In this example we will use the *analytical likelihood function* computed as suggested in [this paper](https://psycnet.apa.org/record/2009-11068-003).
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Instantiate the model
+
+ To instantiate our `HSSM` class, in the simplest version, we only need to provide an appropriate dataset.
+ The dataset is expected to be a `pandas.DataFrame` with at least two columns, respectively called `rt` (for reaction time) and `response`.
+ Our data simulated above is already in the correct format, so let us try to construct the class.
+
+ **NOTE:**
+
+ If you are a user of the [HDDM](https://github.com/hddm-devs/hddm) python package, this workflow should seem very familiar.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_simulated_main, hssm):
+ simple_ddm_model = hssm.HSSM(data=dataset_simulated_main)
+ return (simple_ddm_model,)
+
+
+@app.cell
+def _(simple_ddm_model):
+ simple_ddm_model
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ The `print()` function gives us some basic information about our model, including the *number of observations*, the *parameters in the model*, and their respective *prior settings*. We can also create a nice little graphical representation of our model...
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Model Graph
+
+ Since `HSSM` creates a `PyMC Model`, we can use the `.graph()` function to get a graphical representation of the model we created.
+ """)
+ return
+
+
+@app.cell
+def _(mo, simple_ddm_model):
+ graph_simulated_main = simple_ddm_model.graph()
+ # For marimo display
+ png_bytes_main = graph_simulated_main.pipe(format="png")
+ mo.image(png_bytes_main)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ This is the simplest model we can build. The graph above follows **plate notation**, commonly used for **probabilistic graphical models**.
+
+ - We have our basic parameters (unobserved, **white nodes**), these are *random variables* in the model and we want to estimate them
+ - Our observed reaction times and choices (`SSMRandomVariable`, **grey node**), are fixed (or conditioned on).
+ - **Rounded rectangles** provide us with information about dimensionality of objects
+ - **Rectangles with sharp edges** represent *deterministic*, but *computed* quantities (not shown here, but in later models)
+
+ This notation is helpful to get a quick overview of the structure of a given model we construct.
+
+ The `graph()` function of course becomes a lot more interesting and useful for more complicated models!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Sample from the Model
+
+ We can now call the `.sample()` function, to get posterior samples. The main arguments you may want to change are listed in the function call below.
+
+ Importantly, multiple backends are possible. We choose the `nuts_numpyro` backend below,
+ which in turn compiles the model to a [`JAX`](https://github.com/google/jax) function.
+ """)
+ return
+
+
+@app.cell
+def _(simple_ddm_model):
+ infer_data_simple_ddm_model = simple_ddm_model.sample(
+ sampler="mcmc", # type of sampler to choose, 'nuts_numpyro',
+ # 'nuts_blackjax' of default pymc nuts sampler
+ cores=1, # how many cores to use
+ chains=2, # how many chains to run
+ draws=500, # number of draws from the markov chain
+ tune=1000, # number of burn-in samples
+ idata_kwargs=dict(log_likelihood=True), # return log likelihood
+ mp_ctx="spawn",
+ ) # mp_ctx="forkserver")
+ return (infer_data_simple_ddm_model,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We sampled from the model, let's look at the output...
+ """)
+ return
+
+
+@app.cell
+def _(infer_data_simple_ddm_model):
+ type(infer_data_simple_ddm_model)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Errr... a closer look might be needed here!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Inference Data / What gets returned from the sampler?
+
+ The sampler returns an [ArviZ](https://python.arviz.org/en/stable/) `InferenceData` object.
+
+ To understand all the logic behind these objects and how they mesh with the Bayesian Workflow, we refer you to the [ArviZ Documentation](https://python.arviz.org/en/stable/getting_started/index.html).
+
+ `InferenceData` is built on top of [xarrays](https://docs.xarray.dev/en/stable/index.html). The [xarray documentation](https://docs.xarray.dev/en/stable/index.html) will help you understand in more detail how to manipulate these objects.
+
+ But let's take a quick high-level look to understand roughly what we are dealing with here!
+ """)
+ return
+
+
+@app.cell
+def _(infer_data_simple_ddm_model):
+ infer_data_simple_ddm_model
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We see that in our case, `infer_data_simple_ddm_model` contains four basic types of data (note: this is extensible!)
+
+ - `posterior`
+ - `log_likelihood`
+ - `sample_stats`
+ - `observed_data`
+
+ The `posterior` object contains our traces for each of the parameters in the model. The `log_likelihood` field contains the trial wise log-likelihoods for each sample from the posterior. The `sample_stats` field contains information about the sampler run. This can be important for chain diagnostics, but we will not dwell on this here. Finally we retrieve our `observed_data`.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Basic Manipulation
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Accessing groups and variables
+ """)
+ return
+
+
+@app.cell
+def _(infer_data_simple_ddm_model):
+ infer_data_simple_ddm_model.posterior
+ return
+
+
+@app.cell
+def _(infer_data_simple_ddm_model):
+ infer_data_simple_ddm_model.posterior.a.head()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ To simply access the underlying data as a `numpy.ndarray`, we can use `.values` (as e.g. when using `pandas.DataFrame` objects).
+ """)
+ return
+
+
+@app.cell
+def _(infer_data_simple_ddm_model):
+ type(infer_data_simple_ddm_model.posterior.a.values)
+ return
+
+
+@app.cell
+def _():
+ # infer_data_simple_ddm_model.posterior.a.values
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Combine `chain` and `draw` dimension
+
+ When operating directly on the `xarray`, you will often find it useful to collapse the `chain` and `draw` coordinates into a single coordinate.
+ **Arviz** makes this easy via the `extract` method.
+ """)
+ return
+
+
+@app.cell
+def _(az, infer_data_simple_ddm_model):
+ idata_extracted = az.extract(infer_data_simple_ddm_model)
+ idata_extracted
+ return (idata_extracted,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Since *Arviz* really just calls the `.stack()` method from *xarray*, here is the corresponding example using the lower-level `xarray` interface.
+ """)
+ return
+
+
+@app.cell
+def _(infer_data_simple_ddm_model):
+ infer_data_simple_ddm_model.posterior.stack(sample=("chain", "draw"))
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Making use of ArviZ
+
+ Working with the `InferenceData` directly, is very helpful if you want to include custom computations into your workflow.
+ For a basic Bayesian Workflow however, you will often find that standard functionality available through [ArviZ](https://python.arviz.org/en/stable/)
+ suffices.
+
+ Below we provide a few examples of useful **Arviz** outputs, which come in handy for analyzing your traces (MCMC samples).
+
+ #### Summary table
+
+ Let's take a look at a summary table for our posterior.
+ """)
+ return
+
+
+@app.cell
+def _(az, infer_data_simple_ddm_model, simple_ddm_model):
+ az.summary(
+ infer_data_simple_ddm_model,
+ var_names=[var_name.name for var_name in simple_ddm_model.pymc_model.free_RVs],
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ This table returns the parameter-wise mean of our posterior and a few extra statistics.
+
+ Of these extra statistics, the one-stop shop for flagging convergence issues is the `r_hat` value, which
+ is reported in the right-most column.
+
+ To navigate this statistic, here is a rule of thumb widely used in applied Bayesian statistics.
+
+ If you find an `r_hat` value `> 1.01`, it warrants investigation.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Trace plot
+ """)
+ return
+
+
+@app.cell
+def _(az, infer_data_simple_ddm_model, mo, param_dict_init, plt):
+ ax_trace = az.plot_trace(
+ infer_data_simple_ddm_model, # we exclude the log_likelihood traces here
+ lines=[(key_, {}, param_dict_init[key_]) for key_ in param_dict_init],
+ )
+
+ fig_trace = plt.gcf()
+
+ plt.tight_layout()
+ mo.mpl.interactive(fig_trace)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ The `.sample()` function also sets a `trace` attribute, on our `hssm` class, so instead, we could call the plot like so:
+ """)
+ return
+
+
+@app.cell
+def _(az, mo, param_dict_init, plt, simple_ddm_model):
+ az.plot_trace(
+ simple_ddm_model.traces,
+ lines=[(key_, {}, param_dict_init[key_]) for key_ in param_dict_init],
+ );
+
+ fig_trace2 = plt.gcf()
+
+ plt.tight_layout()
+ mo.mpl.interactive(fig_trace2)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ In this tutorial we are most often going to use the latter way of accessing the traces, but there is no preferred option.
+
+ Let's look at a few more plots.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Forest Plot
+
+ The forest plot is commonly used for a quick visual check of the marginal posteriors. It is very effective for intuitive communication of results.
+ """)
+ return
+
+
+@app.cell
+def _(az, simple_ddm_model):
+ az.plot_forest(simple_ddm_model.traces)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ##### Combining Chains
+ By default, chains are separated out into *separate caterpillars*, however
+ sometimes, especially if you are looking at a forest plot which includes many posterior parameters at once, you want to declutter and collapse the chains into single caterpillars.
+ In this case you can `combine` chains instead.
+ """)
+ return
+
+
+@app.cell
+def _(az, simple_ddm_model):
+ az.plot_forest(simple_ddm_model.traces, combined=True)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Basic Marginal Posterior Plot
+
+ Another way to view the marginal posteriors is provided by the `plot_posterior()` function. It shows the mean and by default the $94\%$ HDIs.
+ """)
+ return
+
+
+@app.cell
+def _(az, simple_ddm_model):
+ az.plot_posterior(simple_ddm_model.traces)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Especially for parameter recovery studies, you may want to include **reference values** for the parameters of interest.
+
+ You can do so using the `ref_val` argument. See the example below:
+ """)
+ return
+
+
+@app.cell
+def _(az, param_dict_init, simple_ddm_model):
+ az.plot_posterior(
+ simple_ddm_model.traces,
+ ref_val=[
+ param_dict_init[var_name]
+ for var_name in simple_ddm_model.traces.posterior.data_vars
+ ],
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Since it is sometimes useful, especially for more complex cases, below an alternative approach in which we pass `ref_val` as a dictionary.
+ """)
+ return
+
+
+@app.cell
+def _(az, param_dict_init, simple_ddm_model):
+ az.plot_posterior(
+ simple_ddm_model.traces,
+ ref_val={
+ "v": [{"ref_val": param_dict_init["v"]}],
+ "a": [{"ref_val": param_dict_init["a"]}],
+ "z": [{"ref_val": param_dict_init["z"]}],
+ "t": [{"ref_val": param_dict_init["t"]}],
+ },
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Posterior Pair Plot
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ The posterior pair plot shows us bi-variate traceplots and is useful to check for simple parameter tradeoffs that may emerge. The simplest (linear) tradeoff may be a high correlation between two parameters.
+ This can be very helpful in diagnosing sampler issues, for example. If such tradeoffs exist, one often sees extremely *wide marginal distributions*.
+
+ In our `ddm` example, we see a little bit of a tradeoff between `a` and `t`, as well as between `v` and `z`, however nothing concerning.
+ """)
+ return
+
+
+@app.cell
+def _(az, mo, param_dict_init, plt, simple_ddm_model):
+ az.plot_pair(
+ simple_ddm_model.traces,
+ kind="kde",
+ reference_values=param_dict_init,
+ marginals=True,
+ );
+
+ pair_plot = plt.gcf()
+
+ plt.tight_layout()
+ mo.mpl.interactive(pair_plot)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ The few plots we showed here are just the beginning: [ArviZ](https://python.arviz.org/en/stable/) has a much broader spectrum of graphs and other convenience functions available. Just check the [documentation](https://python.arviz.org/en/stable/).
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Compute Quantities from idata
+
+ #### Example: Mean and Covariance of Posterior Parameters
+
+ As a simple example, let us calculate the covariance matrix for our posterior samples.
+ """)
+ return
+
+
+@app.cell
+def _(idata_extracted, np, plt):
+ # Calculate the correlation matrix
+ posterior_correlation_matrix = np.corrcoef(
+ np.stack(
+ [idata_extracted[var_].values for var_ in idata_extracted.data_vars.variables]
+ )
+ )
+ num_vars = posterior_correlation_matrix.shape[0]
+
+ # Make heatmap
+ fig, ax = plt.subplots(1, 1)
+ cax = ax.imshow(posterior_correlation_matrix, cmap="coolwarm", vmin=-1, vmax=1)
+ fig.colorbar(cax, ax=ax)
+ ax.set_title("Posterior Correlation Matrix")
+
+ # Add ticks
+ ax.set_xticks(range(posterior_correlation_matrix.shape[0]))
+ ax.set_xticklabels([var_ for var_ in idata_extracted.data_vars.variables])
+ ax.set_yticks(range(posterior_correlation_matrix.shape[0]))
+ ax.set_yticklabels([var_ for var_ in idata_extracted.data_vars.variables])
+
+ # Annotate heatmap
+ for i_cor in range(num_vars):
+ for j_cor in range(num_vars):
+ ax.text(
+ j_cor,
+ i_cor,
+ f"{posterior_correlation_matrix[i_cor, j_cor]:.2f}",
+ ha="center",
+ va="center",
+ color="black",
+ )
+
+ plt.show()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## HSSM Model based on LAN likelihood
+
+ With HSSM you can switch between pre-supplied models with a simple change of argument. The type of likelihood that will be accessed might change in the background for you.
+
+ Here we see an example in which the underlying likelihood is now a [LAN](https://elifesciences.org/articles/65074).
+
+ We will talk more about different types of likelihood functions and backends later in the tutorial. For now just keep the following in mind:
+
+ There are three types of likelihoods
+
+ 1. `analytic`
+ 2. `approx_differentiable`
+ 3. `blackbox`
+
+ To check which type is used in your HSSM model simple type:
+ """)
+ return
+
+
+@app.cell
+def _(simple_ddm_model):
+ simple_ddm_model.loglik_kind
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Ah... we were using an `analytical` likelihood with the DDM model in the last section.
+ Now let's see something different!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Simulating Angle Data
+
+ Again, let us simulate a simple dataset. This time we will use the `angle` model (passed via the `model` argument to the `simulator()` function).
+
+
+ This model is distinguished from the basic `ddm` model by an additional `theta` parameter which specifies the angle with which the decision boundaries collapse over time.
+
+
+
+ DDMs with collapsing bounds have been of significant interest in the theoretical literature, but applications were rare due to a lack of analytical likelihoods. HSSM facilitates inference with such models via our `approx_differentiable` likelihoods. HSSM ships with a few predefined models based on [LANs](https://elifesciences.org/articles/65074), but really we don't want to overemphasize those. They reflect the research interest of our and adjacent labs to a great extent.
+
+ Instead, we encourage the community to contribute to this model reservoir (more on this later).
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ # Simulate angle data
+ v_angle_true = 0.5
+ a_angle_true = 1.5
+ z_angle_true = 0.5
+ t_angle_true = 0.2
+ theta_angle_true = 0.2
+
+ param_dict_angle = dict(v=0.5, a=1.5, z=0.5, t=0.2, theta=0.2)
+
+ lines_list_angle = [(key_, {}, param_dict_angle[key_]) for key_ in param_dict_angle]
+
+ dataset_angle = hssm.simulate_data(model="angle", theta=param_dict_angle, size=1000)
+ return dataset_angle, lines_list_angle
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We pass a single additional argument to our `HSSM` class and set `model='angle'`.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_angle, hssm):
+ model_angle = hssm.HSSM(data=dataset_angle, model="angle")
+
+ model_angle
+ return (model_angle,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ The model graph now shows us an additional parameter `theta`!
+ """)
+ return
+
+
+@app.cell
+def _(mo, model_angle):
+ model_angle_graph = model_angle.graph()
+ png_bytes_angle = model_angle_graph.pipe(format="png")
+ mo.image(png_bytes_angle)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's check the type of likelihood that is used under the hood ...
+ """)
+ return
+
+
+@app.cell
+def _(model_angle):
+ model_angle.loglik_kind
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Ok so here we rely on a likelihood of the `approx_differentiable` kind.
+
+ As discussed, with the initial set of pre-supplied likelihoods, this implies that we are using a [LAN](https://elifesciences.org/articles/65074) in the background.
+ """)
+ return
+
+
+@app.cell
+def _(jax, model_angle):
+ jax.config.update("jax_enable_x64", False)
+ infer_data_angle = model_angle.sample(
+ sampler="nuts_numpyro",
+ chains=2,
+ cores=2,
+ draws=500,
+ tune=500,
+ idata_kwargs=dict(log_likelihood=False), # no need to return likelihoods here
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(az, lines_list_angle, mo, model_angle, plt):
+ az.plot_trace(model_angle.traces, lines=lines_list_angle)
+ plt.tight_layout()
+
+ plot_trace3 = plt.gcf()
+
+ plt.tight_layout()
+ mo.mpl.interactive(plot_trace3)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Choosing Priors
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ HSSM allows you to specify priors quite freely. If you used HDDM previously, you may feel relieved to read that your hands are now untied!
+
+
+
+
+ With HSSM we have multiple routes to priors. But let's first consider a special case:
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Fixing a parameter to a given value
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Assume that instead of fitting all parameters of the DDM,
+
+
+
+ we instead want to fit only the `v` (drift) parameter, setting all other parameters to fixed scalar values.
+
+
+
+ HSSM makes this extremely easy!
+ """)
+ return
+
+
+@app.cell
+def _(param_dict_init):
+ param_dict_init
+ return
+
+
+@app.cell
+def _(dataset_simulated_main, hssm, param_dict_init):
+ ddm_model_only_v = hssm.HSSM(
+ data=dataset_simulated_main,
+ model="ddm",
+ a=param_dict_init["a"],
+ t=param_dict_init["t"],
+ z=param_dict_init["z"],
+ )
+ return (ddm_model_only_v,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Since we fix all but one parameter, we therefore estimate only one parameter. This should be reflected in our model graph, where we expect only one free random variable `v`:
+ """)
+ return
+
+
+@app.cell
+def _(ddm_model_only_v, mo):
+ ddm_model_only_v_graph = ddm_model_only_v.graph()
+ ddm_model_only_v_graph_png = ddm_model_only_v_graph.pipe(format="png")
+ mo.image(ddm_model_only_v_graph_png)
+ return
+
+
+@app.cell
+def _(ddm_model_only_v):
+ ddm_model_only_v.sample(
+ sampler="mcmc",
+ chains=2,
+ cores=2,
+ draws=500,
+ tune=500,
+ idata_kwargs=dict(log_likelihood=False), # no need to return likelihoods here
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(az, ddm_model_only_v, param_dict_init, plt):
+ az.plot_trace(
+ ddm_model_only_v.traces.posterior, lines=[("v", {}, param_dict_init["v"])]
+ );
+ plt.gcf()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Instead of the trace on the right, a useful alternative / complement is the **rank plot**.
+ As a rule of thumb, if the rank plots within chains look *uniformly distributed*, then our chains generally exhibit *good mixing*.
+ """)
+ return
+
+
+@app.cell
+def _(az, ddm_model_only_v):
+ az.plot_trace(ddm_model_only_v.traces, kind="rank_bars")
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Named priors
+
+ We can choose any [PyMC](https://www.pymc.io/welcome.html) `Distribution` to specify a prior for a given parameter.
+
+ Even better, if natural *parameter bounds* are provided, HSSM *automatically truncates the prior distribution* so that it respects these bounds.
+
+ Below is an example in which we specify a *Normal* prior on the `v` parameter of the DDM.
+
+ We choose a *ridiculously low* $\sigma$ value, to illustrate its regularizing effect on the parameter (just so we see a difference and you are convinced that something changed).
+ """)
+ return
+
+
+@app.cell
+def _(dataset_simulated_main, hssm):
+ model_normal = hssm.HSSM(
+ data=dataset_simulated_main,
+ include=[
+ {
+ "name": "v",
+ "prior": {"name": "Normal", "mu": 0, "sigma": 0.01},
+ }
+ ],
+ )
+ return (model_normal,)
+
+
+@app.cell
+def _(model_normal):
+ model_normal
+ return
+
+
+@app.cell
+def _(model_normal):
+ infer_data_normal = model_normal.sample(
+ sampler="mcmc",
+ chains=2,
+ cores=2,
+ draws=500,
+ tune=500,
+ idata_kwargs=dict(log_likelihood=False), # no need to return likelihoods here
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(az, model_normal, param_dict_init):
+ az.plot_trace(
+ model_normal.traces,
+ lines=[(key_, {}, param_dict_init[key_]) for key_ in param_dict_init],
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Observe how we reused our previous dataset with underlying parameters
+
+ - `v = 0.5`
+ - `a = 1.5`
+ - `z = 0.5`
+ - `t = 0.2`
+
+ In contrast to our previous sampler round, in which we used Uniform priors, here the `v` estimate is shrunk severely toward $0$ and the `t` and `z` parameter estimates are very biased to make up for this distortion. Also, overall we see a lot of divergences now, which is a sign of poor sampler performance.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## HSSM Model with Regression
+
+
+
+
+ Crucial to the scope of HSSM is the ability to link parameters with trial-by-trial covariates via (hierarchical, but more on this later) general linear models.
+
+ In this section we explore how HSSM deals with these models. No big surprise here... it's simple!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Case 1: One parameter is a Regression Target
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Simulating Data
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's first simulate some data, where the trial-by-trial parameters of the `v` parameter in our model are driven by a simple linear regression model.
+
+
+ The regression model is driven by two (random) covariates `x` and `y`, respectively with coefficients of $0.8$ and $0.3$ which are also simulated below.
+ We set the intercept to $0.3$.
+
+ The rest of the parameters are fixed to single values as before.
+ """)
+ return
+
+
+@app.cell
+def _(hssm, np):
+ # Set up trial by trial parameters
+ v_intercept = 0.3
+ x = np.random.uniform(-1, 1, size=1000)
+ v_x = 0.8
+ y = np.random.uniform(-1, 1, size=1000)
+ v_y = 0.3
+ v_reg_v = v_intercept + (v_x * x) + (v_y * y)
+
+ # rest
+ a_reg_v = 1.5
+ z_reg_v = 0.5
+ t_reg_v = 0.1
+
+ param_dict_reg_v = dict(
+ a=1.5, z=0.5, t=0.1, v=v_reg_v, v_x=v_x, v_y=v_y, v_Intercept=v_intercept, theta=0.0
+ )
+
+ # base dataset
+ dataset_reg_v = hssm.simulate_data(model="ddm", theta=param_dict_reg_v, size=1)
+
+    # Adding covariates into the dataframe
+ dataset_reg_v["x"] = x
+ dataset_reg_v["y"] = y
+ return dataset_reg_v, param_dict_reg_v, v_intercept, v_x, v_y, x, y
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Basic Model
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We now create the `HSSM` model.
+
+ Notice how we set the `include` argument. The include argument expects a list of dictionaries, one dictionary for each parameter to be specified via a regression model.
+
+ Four `keys` are expected to be set:
+
+ - The `name` of the parameter,
+ - Potentially a `prior` for each of the regression level parameters ($\beta$'s),
+ - The regression `formula`
+ - A `link` function.
+
+ The regression formula follows the syntax in the [formulae](https://pypi.org/project/formulae/) python package (as used by the [Bambi](https://bambinos.github.io/bambi/) package for building Bayesian Hierarchical Regression Models).
+
+ [Bambi](https://bambinos.github.io/bambi/) forms the main model-construction backend of HSSM.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_reg_v, hssm):
+ model_reg_v_simple = hssm.HSSM(
+ data=dataset_reg_v, include=[{"name": "v", "formula": "v ~ 1 + x + y"}]
+ )
+ return (model_reg_v_simple,)
+
+
+@app.cell
+def _(model_reg_v_simple):
+ model_reg_v_simple
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ##### `Param` class
+ As illustrated below, there is an alternative way of specifying the parameter specific data via the `Param` class.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_reg_v, hssm):
+ model_reg_v_simple_new = hssm.HSSM(
+ data=dataset_reg_v, include=[hssm.Param(name="v", formula="v ~ 1 + x + y")]
+ )
+ return (model_reg_v_simple_new,)
+
+
+@app.cell
+def _(model_reg_v_simple_new):
+ model_reg_v_simple_new
+ return
+
+
+@app.cell
+def _(mo, model_reg_v_simple):
+ model_reg_v_simple_graph = model_reg_v_simple.graph()
+
+ model_reg_v_simple_png = model_reg_v_simple_graph.pipe(format="png")
+ mo.image(model_reg_v_simple_png)
+ return
+
+
+@app.cell
+def _(model_reg_v_simple):
+ print(model_reg_v_simple)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ #### Custom Model
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ These were the defaults, with a little extra labor, we can e.g. customize the choice of priors for each parameter in the model.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_reg_v, hssm):
+ model_reg_v = hssm.HSSM(
+ data=dataset_reg_v,
+ include=[
+ {
+ "name": "v",
+ "prior": {
+ "Intercept": {"name": "Uniform", "lower": -3.0, "upper": 3.0},
+ "x": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ "y": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ },
+ "formula": "v ~ 1 + x + y",
+ "link": "identity",
+ }
+ ],
+ )
+ return (model_reg_v,)
+
+
+@app.cell
+def _(model_reg_v):
+ model_reg_v
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Notice how `v` is now set as a regression.
+ """)
+ return
+
+
+@app.cell
+def _(model_reg_v):
+ infer_data_reg_v = model_reg_v.sample(
+ sampler="mcmc",
+ chains=2,
+ cores=2,
+ draws=500,
+ tune=500,
+ mp_ctx="spawn",
+ )
+ return (infer_data_reg_v,)
+
+
+@app.cell
+def _(infer_data_reg_v):
+ infer_data_reg_v
+ return
+
+
+@app.cell
+def _():
+ # az.plot_forest(model_reg_v.traces)
+ return
+
+
+@app.cell
+def _(az, model_reg_v, param_dict_reg_v):
+ az.plot_trace(
+ model_reg_v.traces,
+ var_names=["~v"],
+ lines=[(key_, {}, param_dict_reg_v[key_]) for key_ in param_dict_reg_v],
+ )
+ return
+
+
+@app.cell
+def _(az, model_reg_v, param_dict_reg_v, plt):
+ az.plot_trace(
+ model_reg_v.traces,
+ var_names=["~v"],
+ lines=[(key_, {}, param_dict_reg_v[key_]) for key_ in param_dict_reg_v],
+ )
+ plt.tight_layout()
+ plt.gcf()
+ return
+
+
+@app.cell
+def _(az, model_reg_v):
+ # Looks like parameter recovery was successful
+ az.summary(model_reg_v.traces, var_names=["~v"])
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Case 2: One parameter is a Regression (LAN)
+
+ We can do the same thing with the `angle` model.
+
+ **Note**:
+
+ Our dataset was generated from the basic DDM here, so since the DDM assumes stable bounds, we expect the `theta` (angle of linear collapse) parameter to be recovered as close to $0$.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_reg_v, hssm):
+ model_reg_v_angle = hssm.HSSM(
+ data=dataset_reg_v,
+ model="angle",
+ include=[
+ {
+ "name": "v",
+ "prior": {
+ "Intercept": {
+ "name": "Uniform",
+ "lower": -3.0,
+ "upper": 3.0,
+ },
+ "x": {
+ "name": "Uniform",
+ "lower": -1.0,
+ "upper": 1.0,
+ },
+ "y": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ },
+ "formula": "v ~ 1 + x + y",
+ "link": "identity",
+ }
+ ],
+ )
+ return (model_reg_v_angle,)
+
+
+@app.cell
+def _(mo, model_reg_v_angle):
+ model_reg_v_angle_graph = model_reg_v_angle.graph()
+
+ model_reg_v_angle_png = model_reg_v_angle_graph.pipe(format="png")
+ mo.image(model_reg_v_angle_png)
+ return
+
+
+@app.cell
+def _(model_reg_v_angle):
+ trace_reg_v_angle = model_reg_v_angle.sample(
+ sampler="mcmc",
+ chains=1,
+ cores=1,
+ draws=1000,
+ tune=500,
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(az, model_reg_v_angle, param_dict_reg_v, plt):
+ az.plot_trace(
+ model_reg_v_angle.traces,
+ var_names=["~v"],
+ lines=[(key_, {}, param_dict_reg_v[key_]) for key_ in param_dict_reg_v],
+ )
+ plt.tight_layout()
+ plt.gcf()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Great! `theta` is recovered correctly, on top of that, we have reasonable recovery for all other parameters!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Case 3: Multiple Parameters are Regression Targets (LAN)
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's get a bit more ambitious. We may, for example, want to try a regression on a few of our basic model parameters at once. Below we show an example where we model both the `a` and the `v` parameters with a regression.
+
+ **NOTE:**
+
+ In our dataset of this section, only `v` is *actually* driven by a trial-by-trial regression, so we expect the regression coefficients for `a` to hover around $0$ in our posterior.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_reg_v, hssm, param_dict_reg_v):
+ # Instantiate our hssm model
+ from copy import deepcopy
+
+ param_dict_reg_v_a = deepcopy(param_dict_reg_v)
+ param_dict_reg_v_a["a_Intercept"] = param_dict_reg_v_a["a"]
+ param_dict_reg_v_a["a_x"] = 0
+ param_dict_reg_v_a["a_y"] = 0
+
+ hssm_reg_v_a_angle = hssm.HSSM(
+ data=dataset_reg_v,
+ model="angle",
+ include=[
+ {
+ "name": "v",
+ "prior": {
+ "Intercept": {"name": "Uniform", "lower": -3.0, "upper": 3.0},
+ "x": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ "y": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ },
+ "formula": "v ~ 1 + x + y",
+ },
+ {
+ "name": "a",
+ "prior": {
+ "Intercept": {"name": "Uniform", "lower": 0.5, "upper": 3.0},
+ "x": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ "y": {"name": "Uniform", "lower": -1.0, "upper": 1.0},
+ },
+ "formula": "a ~ 1 + x + y",
+ },
+ ],
+ )
+ return hssm_reg_v_a_angle, param_dict_reg_v_a
+
+
+@app.cell
+def _(hssm_reg_v_a_angle):
+ hssm_reg_v_a_angle
+ return
+
+
+@app.cell
+def _(hssm_reg_v_a_angle, mo):
+ hssm_reg_v_a_angle_graph = hssm_reg_v_a_angle.graph()
+
+ hssm_reg_v_a_angle_png = hssm_reg_v_a_angle_graph.pipe(format="png")
+ mo.image(hssm_reg_v_a_angle_png)
+ return
+
+
+@app.cell
+def _(hssm_reg_v_a_angle):
+ infer_data_reg_v_a = hssm_reg_v_a_angle.sample(
+ sampler="mcmc",
+ chains=2,
+ cores=1,
+ draws=1000,
+ tune=1000,
+ mp_ctx="spawn",
+ )
+ return (infer_data_reg_v_a,)
+
+
+@app.cell
+def _(az, infer_data_reg_v_a):
+ az.summary(
+ infer_data_reg_v_a, var_names=["~a", "~v"]
+ ) # , var_names=["~rt,response_a"])
+ return
+
+
+@app.cell
+def _(az, hssm_reg_v_a_angle, param_dict_reg_v_a, plt):
+ az.plot_trace(
+ hssm_reg_v_a_angle.traces,
+ var_names=["~v", "~a"],
+ lines=[(key_, {}, param_dict_reg_v_a[key_]) for key_ in param_dict_reg_v_a],
+ )
+ plt.tight_layout()
+ plt.gcf()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We successfully recover our regression betas for `a`! Moreover, no warning signs concerning our chains.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Case 4: Categorical covariates
+ """)
+ return
+
+
+@app.cell
+def _(hssm, np):
+ # Set up trial by trial parameters
+ x_cat = np.random.choice(4, size=1000).astype(int)
+ x_offset_cat = np.array([0, 1, -0.5, 0.75])
+
+ y_cat = np.random.uniform(-1, 1, size=1000)
+ v_y_cat = 0.3
+ v_reg_v_cat = 0 + (v_y_cat * y_cat) + x_offset_cat[x_cat]
+
+ # rest
+ a_reg_v_cat = 1.5
+ z_reg_v_cat = 0.5
+ t_reg_v_cat = 0.1
+
+ # base dataset
+ dataset_reg_v_cat = hssm.simulate_data(
+ model="ddm", theta=dict(v=v_reg_v_cat, a=a_reg_v_cat, z=z_reg_v_cat, t=t_reg_v_cat), size=1
+ )
+
+    # Adding covariates into the dataframe
+ dataset_reg_v_cat["x"] = x_cat
+ dataset_reg_v_cat["y"] = y_cat
+ return (dataset_reg_v_cat,)
+
+
+@app.cell
+def _(dataset_reg_v_cat, hssm):
+ model_reg_v_cat = hssm.HSSM(
+ data=dataset_reg_v_cat,
+ model="angle",
+ include=[
+ {
+ "name": "v",
+ "formula": "v ~ 0 + C(x) + y",
+ "link": "identity",
+ }
+ ],
+ )
+ return (model_reg_v_cat,)
+
+
+@app.cell
+def _(mo, model_reg_v_cat):
+ model_reg_v_cat_graph = model_reg_v_cat.graph()
+
+ model_reg_v_cat_png = model_reg_v_cat_graph.pipe(format="png")
+ mo.image(model_reg_v_cat_png)
+ return
+
+
+@app.cell
+def _(model_reg_v_cat):
+ infer_data_reg_v_cat = model_reg_v_cat.sample(
+ sampler="mcmc",
+ chains=2,
+ cores=1,
+ draws=1000,
+ tune=500,
+ mp_ctx="spawn",
+ )
+ return (infer_data_reg_v_cat,)
+
+
+@app.cell
+def _(az, infer_data_reg_v_cat):
+ az.plot_forest(infer_data_reg_v_cat, var_names=["~v"])
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Hierarchical Inference
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's try to fit a hierarchical model now. We will simulate a dataset with $15$ participants, with $200$ observations / trials for each participant.
+
+ We define a group mean `mean_v` and a group standard deviation `sd_v` for the intercept parameter of the regression on `v`, which we sample from a corresponding normal distribution for each participant.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Simulate Data
+ """)
+ return
+
+
+@app.cell
+def _(hssm, np, pd):
+ # Make some hierarchical data
+ n_participants = 15 # number of participants
+ n_trials = 200 # number of trials per participant
+
+ sd_v = 0.5 # sd for v-intercept
+ mean_v = 0.5 # mean for v-intercept
+
+ data_list = []
+ for i in range(n_participants):
+ # Make parameters for participant i
+ v_intercept_hier = np.random.normal(mean_v, sd_v, size=1)
+ x_hier = np.random.uniform(-1, 1, size=n_trials)
+ v_x_hier = 0.8
+ y_hier = np.random.uniform(-1, 1, size=n_trials)
+ v_y_hier = 0.3
+ v_hier = v_intercept_hier + (v_x_hier * x_hier) + (v_y_hier * y_hier)
+
+ a_hier = 1.5
+ t_hier = 0.5
+ z_hier = 0.5
+
+ # true_values = np.column_stack(
+ # [v, np.repeat([[1.5, 0.5, 0.5, 0.0]], axis=0, repeats=n_trials)]
+ # )
+
+ data_tmp = hssm.simulate_data(
+ model="ddm", theta=dict(v=v_hier, a=a_hier, z=z_hier, t=t_hier), size=1
+ )
+ data_tmp["participant_id"] = i
+ data_tmp["x"] = x_hier
+ data_tmp["y"] = y_hier
+
+ data_list.append(data_tmp)
+
+ # Make single dataframe out of participant-wise datasets
+ dataset_reg_v_hier = pd.concat(data_list)
+ dataset_reg_v_hier
+ return (dataset_reg_v_hier,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We can now define our `HSSM` model.
+
+ We specify the regression as `v ~ 1 + (1|participant_id) + x + y`.
+
+ `(1|participant_id)` tells the model to create a *participant-wise* offset for the intercept parameter. The rest of the regression $\beta$'s is fit globally.
+
+ As an **R** user you may recognize this syntax from the [lmer](https://www.rdocumentation.org/packages/lme4/versions/1.1-33/topics/lmer) package.
+
+ Our [Bambi](https://bambinos.github.io/bambi/) backend is essentially a Bayesian version of [lmer](https://www.rdocumentation.org/packages/lme4/versions/1.1-33/topics/lmer), quite like the [BRMS](https://cran.r-project.org/web/packages/brms/index.html) package in **R**, which operates on top of [STAN](https://mc-stan.org/).
+
+ As a previous [HDDM](https://hddm.readthedocs.io/en/latest/) user, you may recognize that now proper mixed-effect models are viable!
+
+ You should be able to handle between and within participant effects naturally now!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Basic Hierarchical Model
+ """)
+ return
+
+
+@app.cell
+def _(dataset_reg_v_hier, hssm):
+ model_reg_v_angle_hier = hssm.HSSM(
+ data=dataset_reg_v_hier,
+ model="angle",
+ noncentered=True,
+ include=[
+ {
+ "name": "v",
+ "prior": {
+ "Intercept": {
+ "name": "Normal",
+ "mu": 0.0,
+ "sigma": 0.5,
+ },
+ "x": {"name": "Normal", "mu": 0.0, "sigma": 0.5},
+ "y": {"name": "Normal", "mu": 0.0, "sigma": 0.5},
+ },
+ "formula": "v ~ 1 + (1|participant_id) + x + y",
+ "link": "identity",
+ }
+ ],
+ )
+ return (model_reg_v_angle_hier,)
+
+
+@app.cell
+def _(mo, model_reg_v_angle_hier):
+ model_reg_v_angle_hier_graph = model_reg_v_angle_hier.graph()
+
+ model_reg_v_angle_hier_graph_png = model_reg_v_angle_hier_graph.pipe(format="png")
+ mo.image(model_reg_v_angle_hier_graph_png)
+ return
+
+
+@app.cell
+def _(jax, model_reg_v_angle_hier):
+ jax.config.update("jax_enable_x64", False)
+ model_reg_v_angle_hier.sample(
+ sampler="mcmc",
+ chains=2,
+ cores=2,
+ draws=500,
+ tune=500,
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's look at the posteriors!
+ """)
+ return
+
+
+@app.cell
+def _(az, model_reg_v_angle_hier):
+ az.plot_forest(model_reg_v_angle_hier.traces, var_names=["~v", "~a"], combined=False)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Model Comparison
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Fitting single models is all well and good. We are however, often interested in comparing how well a few different models account for the same data.
+
+ Through [ArviZ](https://python.arviz.org/en/stable/index.html), we have out of the box access to modern Bayesian Model Comparison. We will keep it simple here, just to illustrate the basic idea.
+
+ ### Scenario
+
+ The following scenario is explored.
+
+ First we generate data from a `ddm` model with fixed parameters, specifically we set the `a` parameter to $1.5$.
+
+ We then define two `HSSM` models:
+
+ 1. A model which allows fitting all but the `a` parameter, which is fixed to $1.0$ (wrong)
+ 2. A model which allows fitting all but the `a` parameter, which is fixed to $1.5$ (correct)
+
+ We then use the [ArviZ](https://python.arviz.org/en/stable/index.html)'s `compare()` function, to perform model comparison via `elpd_loo`.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Data Simulation
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ # Parameters
+ param_dict_mod_comp = dict(v=0.5, a=1.5, z=0.5, t=0.2)
+
+ # Simulation
+ dataset_model_comp = hssm.simulate_data(
+ model="ddm", theta=param_dict_mod_comp, size=500
+ )
+
+ print(dataset_model_comp)
+ return (dataset_model_comp,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Defining the Models
+ """)
+ return
+
+
+@app.cell
+def _(dataset_model_comp, hssm):
+ # 'wrong' model
+ model_model_comp_1 = hssm.HSSM(
+ data=dataset_model_comp,
+ model="angle",
+ a=1.0,
+ )
+ return (model_model_comp_1,)
+
+
+@app.cell
+def _(dataset_model_comp, hssm):
+ # 'correct' model
+ model_model_comp_2 = hssm.HSSM(
+ data=dataset_model_comp,
+ model="angle",
+ a=1.5,
+ )
+ return (model_model_comp_2,)
+
+
+@app.cell
+def _(dataset_model_comp, hssm):
+ # 'wrong' model ddm
+ model_model_comp_3 = hssm.HSSM(
+ data=dataset_model_comp,
+ model="ddm",
+ a=1.0,
+ )
+ return (model_model_comp_3,)
+
+
+@app.cell
+def _(model_model_comp_1):
+ infer_data_model_comp_1 = model_model_comp_1.sample(
+ sampler="mcmc",
+ cores=1,
+ chains=2,
+ draws=1000,
+ tune=1000,
+ idata_kwargs=dict(
+ log_likelihood=True
+ ), # model comparison metrics usually need this!
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(model_model_comp_2):
+ infer_data_model_comp_2 = model_model_comp_2.sample(
+ sampler="mcmc",
+ cores=1,
+ chains=2,
+ draws=1000,
+ tune=1000,
+ idata_kwargs=dict(
+ log_likelihood=True
+ ), # model comparison metrics usually need this!
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(model_model_comp_3):
+ infer_data_model_comp_3 = model_model_comp_3.sample(
+ sampler="mcmc",
+ cores=1,
+ chains=2,
+ draws=1000,
+ tune=1000,
+ idata_kwargs=dict(
+ log_likelihood=True
+ ), # model comparison metrics usually need this!
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Compare
+ """)
+ return
+
+
+@app.cell
+def _(az, model_model_comp_1, model_model_comp_2, model_model_comp_3):
+ compare_data = az.compare(
+ {
+ "a_fixed_1(wrong)": model_model_comp_1.traces,
+ "a_fixed_1.5(correct)": model_model_comp_2.traces,
+ "a_fixed_1_ddm(wrong)": model_model_comp_3.traces,
+ }
+ )
+
+ compare_data
+ return (compare_data,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Notice how the posterior weight on the `correct` model is close to (or equal to) $1$ here.
+ In other words model comparison points us to the correct model with
+ a very high degree of certainty here!
+
+
+ We can also use the `.plot_compare()` function to illustrate the model comparison visually.
+ """)
+ return
+
+
+@app.cell
+def _(az, compare_data):
+ az.plot_compare(compare_data)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Using the forest plot we can take a look at what goes wrong for the "wrong" model.
+
+ To make up for the misplaced setting of the `a` parameter, the posterior seems to compensate by
+ mis-estimating the other parameters.
+ """)
+ return
+
+
+@app.cell
+def _(az, model_model_comp_1, model_model_comp_2, model_model_comp_3):
+ az.plot_forest(
+ [model_model_comp_1.traces, model_model_comp_2.traces, model_model_comp_3.traces],
+ model_names=["a_fixed_1(wrong)", "a_fixed_1.5(correct)", "a_fixed_1(wrong)_ddm"],
+ )
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## Closer look!
+
+
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We have seen a few examples of HSSM models at this point. Add a model via a string, maybe toy a bit with the priors and set regression functions for a given parameter. Turn it hierarchical... Here we begin to peek a bit under the hood.
+
+ After all, we want to encourage you to contribute models to the package yourself.
+
+ Let's remind ourselves of the `model_config` dictionaries that define model properties for us. Again let's start with the DDM.
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ hssm.config.default_model_config["ddm"].keys()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ The dictionary has a few high level keys.
+
+ 1. `response`
+
+ 2. `list_params`
+
+ 3. `description`
+
+ 4. `likelihoods`
+
+
+ Let us take a look at the available `likelihoods`:
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ hssm.config.default_model_config["ddm"]["likelihoods"]
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ For the DDM we have available all three types of likelihoods that HSSM deals with:
+
+ 1. `analytical`
+ 2. `approx_differentiable`
+ 3. `blackbox`
+
+ Let's expand the dictionary contents more:
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ hssm.config.default_model_config["ddm"]["likelihoods"]["analytical"]
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We see three properties (key) in this dictionary, of which two are essential:
+
+ - The `loglik` field, which points to the likelihood function
+ - The `backend` field, which can be either `None` (defaulting to pytensor for `analytical` likelihoods), `jax` or `pytensor`
+ - The `bounds` field, which specifies bounds on a subset of the model parameters
+ - The `default_priors` field, which specifies parameter wise priors
+
+ If you provide `bounds` for a parameter, but no `default_priors`, a *Uniform* prior that respects the specified bounds will be applied.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Next, let's look at the `approx_differentiable` part.
+ The likelihood in this part is based on a [LAN]() which was available in [HDDM]() through the [LAN extension]().
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ hssm.config.default_model_config["ddm"]["likelihoods"]["approx_differentiable"]
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We see that the `loglik` field is now a string that points to a `.onnx` file.
+ [Onnx](https://onnx.ai/) is a meta framework for Neural Network specification, that allows translation between deep learning Frameworks. This is the preferred format for the neural networks we store in our model reservoir on [HuggingFace](https://huggingface.co/).
+
+ Moreover notice that we now have a `backend` field. We allow for two primary backends in the `approx_differentiable` field.
+
+ 1. `pytensor`
+ 2. `jax`
+
+ The `jax` backend assumes that your likelihood is described as a jax function, the `pytensor` backend assumes that your likelihood is described as a `pytensor` function. Ok not that surprising...
+
+ We won't dwell on this here, however the key idea is to provide users with a large degree of flexibility in describing their likelihood functions and moreover to allow targeted optimization towards MCMC sampler types that [PyMC]() allows us to access.
+
+ You can find a [dedicated tutorial](https://lnccbrown.github.io/HSSM/tutorial_likelihoods/#3-kinds-of-likelihoods) in the documentation, which describes the different likelihoods in much more detail.
+
+ Instead, let's take a quick look at how these newfound insights can be used for custom model definition.
+ """)
+ return
+
+
+@app.cell
+def _(dataset_simulated, hssm):
+ hssm_alternative_model = hssm.HSSM(
+ data=dataset_simulated,
+ model="ddm",
+ loglik_kind="approx_differentiable",
+ )
+ return (hssm_alternative_model,)
+
+
+@app.cell
+def _(hssm_alternative_model):
+ hssm_alternative_model.loglik_kind
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ In this case we actually built the model class with an `approx_differentiable` LAN likelihood, instead of the default `analytical` likelihood we used in the beginning of the tutorial. The assumed generative model remains the `ddm` however!
+ """)
+ return
+
+
+@app.cell
+def _(hssm_alternative_model):
+ hssm_alternative_model.sample(
+ sampler="mcmc",
+ cores=1,
+ chains=2,
+ draws=1000,
+ tune=1000,
+ idata_kwargs=dict(
+ log_likelihood=False
+    ), # no need to return likelihoods here
+ mp_ctx="spawn",
+ )
+ return
+
+
+@app.cell
+def _(az, hssm_alternative_model):
+ az.plot_forest(hssm_alternative_model.traces)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We can take this further and specify a completely custom likelihood. See the [dedicated tutorial](https://lnccbrown.github.io/HSSM/tutorial_likelihoods/#using-custom-likelihoods) for more examples!
+
+ We will see one specific example below to illustrate another type of likelihood function we have available for model building in HSSM, the *Blackbox* likelihood.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## 'Blackbox' Likelihoods
+
+
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### What is a **Blackbox Likelihood Function**?
+
+ A *Blackbox Likelihood Function* is essentially any Python `callable` (function) that provides trial by trial likelihoods for your model of interest. What kind of computations are performed in this Python function is completely arbitrary.
+
+ E.g. you could build a function that performs forward simulation from your model, constructs a kernel-density estimate for the resulting likelihood functions and evaluates your datapoints on this ad-hoc generated approximate likelihood.
+
+ What I just described is a once state-of-the-art method of performing simulation based inference on Sequential Sampling models, a precursor to LANs if you will.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We will do something simpler to keep it short and sweet, but really... the possibilities are endless!
+
+
+
+
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Simulating simple dataset from the DDM
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ As always, let's begin by generating some simple dataset.
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ # Set parameters
+ param_dict_blackbox = dict(v=0.5, a=1.5, z=0.5, t=0.5)
+
+ # Simulate
+ dataset_blackbox = hssm.simulate_data(model="ddm", theta=param_dict_blackbox, size=1000)
+ return dataset_blackbox, param_dict_blackbox
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Define the likelihood
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Now the fun part... we simply define a Python function `my_blackbox_loglik` which takes in our `data` as well as a bunch of model parameters (in our case the familiar `v`,`a`, `z`, `t` from the DDM).
+
+ The function then does some arbitrary computation inside (in our case e.g. we pass the data and parameters to the DDM log-likelihood from our predecessor package HDDM).
+
+ The important part is that inside `my_blackbox_loglik` anything can happen. We happen to call a little custom function that defines the likelihood of a DDM.
+
+ **Fun fact:**
+ It is de-facto the likelihood which is called by [HDDM](https://hddm.readthedocs.io/en/latest/).
+ """)
+ return
+
+
+@app.cell
+def _(hddm_wfpt, np):
+ def my_blackbox_loglik(data, v, a, z, t, err=1e-8):
+ """Create a custom blackbox likelihood function."""
+ data = data[:, 0] * data[:, 1]
+ data_nrows = data.shape[0]
+ # Our function expects inputs as float64, but they are not guaranteed to
+ # come in as such --> we type convert
+ return hddm_wfpt.wfpt.wiener_logp_array(
+ np.float64(data),
+ (np.ones(data_nrows) * v).astype(np.float64),
+ np.ones(data_nrows) * 0,
+ (np.ones(data_nrows) * 2 * a).astype(np.float64),
+ (np.ones(data_nrows) * z).astype(np.float64),
+ np.ones(data_nrows) * 0,
+ (np.ones(data_nrows) * t).astype(np.float64),
+ np.ones(data_nrows) * 0,
+ err,
+ 1,
+ )
+ return (my_blackbox_loglik,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Define HSSM class with our Blackbox Likelihood
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We can now define our HSSM model class as usual, however passing our `my_blackbox_loglik()` function to the `loglik` argument, and passing as `loglik_kind = blackbox`.
+
+ The rest of the model config is as usual. Here we can reuse our `ddm` model config, and simply specify bounds on the parameters (e.g. your Blackbox Likelihood might be trustworthy only on a restricted parameters space).
+ """)
+ return
+
+
+@app.cell
+def _(bmb, dataset_blackbox, hssm, my_blackbox_loglik):
+ blackbox_model = hssm.HSSM(
+ data=dataset_blackbox,
+ model="ddm",
+ loglik=my_blackbox_loglik,
+ loglik_kind="blackbox",
+ model_config={
+ "bounds": {
+ "v": (-10.0, 10.0),
+ "a": (0.5, 5.0),
+ "z": (0.0, 1.0),
+ }
+ },
+ t=bmb.Prior("Uniform", lower=0.0, upper=2.0),
+ )
+ return (blackbox_model,)
+
+
+@app.cell
+def _(blackbox_model, mo):
+ blackbox_model_graph = blackbox_model.graph()
+
+ blackbox_model_graph_png = blackbox_model_graph.pipe(format="png")
+ mo.image(blackbox_model_graph_png)
+ return
+
+
+@app.cell
+def _(blackbox_model):
+ sample = blackbox_model.sample()
+ return (sample,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ **NOTE**:
+
+ Since *Blackbox likelihood functions* are assumed to not be differentiable, our default sampler for such likelihood functions is a `Slice` sampler. HSSM allows you to choose any other suitable sampler from the PyMC package instead. A bunch of options are available for gradient-free samplers.
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Results
+ """)
+ return
+
+
+@app.cell
+def _(az, sample):
+ az.summary(sample)
+ return
+
+
+@app.cell
+def _(az, param_dict_blackbox, plt, sample):
+ az.plot_trace(
+ sample,
+ lines=[(key_, {}, param_dict_blackbox[key_]) for key_ in param_dict_blackbox],
+ )
+ plt.tight_layout()
+ plt.gcf()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ## HSSM Random Variables in PyMC
+
+ We covered a lot of ground in this tutorial so far. You are now a sophisticated HSSM user.
+
+ It is therefore time to reveal a secret. We can actually peel back one more layer...
+
+
+
+
+ Instead of letting HSSM help you build the entire model, we can instead use HSSM to construct valid [PyMC](https://www.pymc.io/welcome.html) distributions and then proceed to build a custom PyMC model by ourselves...
+
+
+
+
+ We will illustrate the simplest example below. It sets a pattern that can be exploited for much more complicated modeling exercises, which importantly go far beyond what our basic HSSM class may facilitate for you!
+
+ See the [dedicated tutorial](https://lnccbrown.github.io/HSSM/notebooks/pymc/) in the [documentation](https://lnccbrown.github.io/HSSM/) if you are interested.
+
+ Let's start by importing a few convenience functions:
+ """)
+ return
+
+
+@app.cell
+def _():
+ # DDM models (the Wiener First-Passage Time distribution)
+ from hssm.distribution_utils import make_distribution
+ from hssm.likelihoods import DDM
+ return DDM, make_distribution
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Simulate some data
+ """)
+ return
+
+
+@app.cell
+def _(hssm):
+ # Simulate
+ param_dict_pymc = dict(v=0.5, a=1.5, z=0.5, t=0.5)
+
+ dataset_pymc = hssm.simulate_data(model="ddm", theta=param_dict_pymc, size=1000)
+ return dataset_pymc, param_dict_pymc
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Build a custom PyMC Model
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ We can now use our custom random variable `DDM` directly in a PyMC model.
+ """)
+ return
+
+
+@app.cell
+def _(DDM, dataset_pymc):
+ import pymc as pm
+
+ with pm.Model() as ddm_pymc:
+ v_custom = pm.Uniform("v", lower=-10.0, upper=10.0)
+ a_custom = pm.HalfNormal("a", sigma=2.0)
+ z_custom = pm.Uniform("z", lower=0.01, upper=0.99)
+ t_custom = pm.Uniform("t", lower=0.0, upper=0.6)
+
+ ddm = DDM(
+ "DDM", observed=dataset_pymc[["rt", "response"]].values, v=v_custom, a=a_custom, z=z_custom, t=t_custom,
+ )
+ return ddm_pymc, pm
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Let's check the model graph:
+ """)
+ return
+
+
+@app.cell
+def _(ddm_pymc, mo, pm):
+ ddm_pymc_graph = pm.model_to_graphviz(model=ddm_pymc)
+
+ ddm_pymc_graph_png = ddm_pymc_graph.pipe(format="png")
+ mo.image(ddm_pymc_graph_png)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Looks remarkably close to our HSSM version!
+
+ We can use PyMC directly to sample and finally return to ArviZ for some plotting!
+ """)
+ return
+
+
+@app.cell
+def _(ddm_pymc, pm):
+ with ddm_pymc:
+ ddm_pymc_trace = pm.sample()
+ return (ddm_pymc_trace,)
+
+
+@app.cell
+def _(az, ddm_pymc_trace, param_dict_pymc, plt):
+ az.plot_trace(
+ ddm_pymc_trace,
+ lines=[(key_, {}, param_dict_pymc[key_]) for key_ in param_dict_pymc],
+ )
+
+ plt.tight_layout()
+ plt.gcf()
+ return
+
+
+@app.cell
+def _(az, ddm_pymc_trace):
+ az.plot_forest(ddm_pymc_trace)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Alternative Models with PyMC
+
+ With very little extra work, we can in fact load any of the models accessible via HSSM. Here is an example, where we load the `angle` model instead.
+
+    We first construct the likelihood function, using `make_likelihood_callable()`.
+
+ Then we produce a valid `pymc.distribution` using the
+ `make_distribution()` utility function.
+
+ Just like the `DDM` class above, we can then use this distribution inside a **PyMC** model.
+ """)
+ return
+
+
+@app.cell
+def _(hssm, make_distribution):
+ from hssm.distribution_utils import make_likelihood_callable
+
+ angle_loglik_alt = make_likelihood_callable(
+ loglik="angle.onnx",
+ loglik_kind="approx_differentiable",
+ backend="jax",
+ params_is_reg=[0, 0, 0, 0, 0],
+ )
+
+ ANGLE_alt = make_distribution(
+ "angle",
+ loglik=angle_loglik_alt,
+ list_params=hssm.defaults.default_model_config["angle"]["list_params"],
+ )
+ return (make_likelihood_callable,)
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ Note that we need to supply the `params_is_reg` argument ("reg" for "regression").
+ This is a boolean vector, which specifies for each input to the likelihood function, whether or not it is defined to be "trial-wise", as is expected if the parameter
+ is the output e.g. of a regression function.
+ """)
+ return
+
+
+@app.cell
+def _(ANGLE, dataset_pymc, pm):
+ # Angle pymc
+ with pm.Model() as angle_pymc:
+ # Define parameters
+ v_alt = pm.Uniform("v", lower=-10.0, upper=10.0)
+ a_alt = pm.Uniform("a", lower=0.5, upper=2.5)
+ z_alt = pm.Uniform("z", lower=0.01, upper=0.99)
+ t_alt = pm.Uniform("t", lower=0.0, upper=0.6)
+ theta_alt = pm.Uniform("theta", lower=-0.1, upper=1.0)
+
+ # Our RV
+ angle_alt = ANGLE(
+ "ANGLE",
+ v=v_alt,
+ a=a_alt,
+ z=z_alt,
+ t=t_alt,
+ theta=theta_alt,
+ observed=dataset_pymc[["rt", "response"]].values,
+ )
+ return (angle_pymc,)
+
+
+@app.cell
+def _(angle_pymc, pm):
+ with angle_pymc:
+ idata_object = pm.sample(nuts_sampler="numpyro")
+ return (idata_object,)
+
+
+@app.cell
+def _(az, idata_object, param_dict_pymc, plt):
+ az.plot_trace(
+ idata_object, lines=[(key_, {}, param_dict_pymc[key_]) for key_ in param_dict_pymc]
+ )
+
+ plt.tight_layout()
+ plt.gcf()
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ ### Regression via PyMC
+
+ Finally to illustrate the usage of PyMC a little more elaborately, let us build a PyMC model with regression components.
+ """)
+ return
+
+
+@app.cell
+def _():
+ from typing import Optional
+
+
+ def make_params_is_reg_vec(
+ reg_parameters: Optional[list] = None, parameter_names: Optional[list] = None
+ ):
+ """Make a list of Trues and Falses to indicate which parameters are vectors."""
+ if (not isinstance(reg_parameters, list)) or (
+ not isinstance(parameter_names, list)
+ ):
+ raise ValueError("Both reg_parameters and parameter_names should be lists")
+
+ bool_list = [0] * len(parameter_names)
+ for param in reg_parameters:
+ bool_list[parameter_names.index(param)] = 1
+ return bool_list
+ return (make_params_is_reg_vec,)
+
+
+@app.cell
+def _(
+ hssm,
+ make_distribution,
+ make_likelihood_callable,
+ make_params_is_reg_vec,
+ np,
+ v_intercept,
+ v_x,
+ v_y,
+ x,
+ y,
+):
+ # Set up trial by trial parameters
+ v_intercept_pymc_reg = 0.3
+ x_pymc_reg = np.random.uniform(-1, 1, size=1000)
+ v_x_pymc_reg = 0.8
+ y_pymc_reg = np.random.uniform(-1, 1, size=1000)
+ v_y_pymc_reg = 0.3
+ v_pymc_reg = v_intercept + (v_x * x) + (v_y * y)
+
+ param_dict_pymc_reg = dict(
+ v_Intercept=v_intercept_pymc_reg,
+ v_x=v_x_pymc_reg,
+ v_y=v_y_pymc_reg,
+ v=v_pymc_reg,
+ a=1.5,
+ z=0.5,
+ t=0.1,
+ theta=0.0,
+ )
+
+ # base dataset
+ pymc_reg_data = hssm.simulate_data(model="ddm", theta=param_dict_pymc_reg, size=1)
+
+    # Adding covariates into the dataframe
+ pymc_reg_data["x"] = x
+ pymc_reg_data["y"] = y
+
+ # Make the boolean vector for params_is_reg argument
+ bool_param_reg = make_params_is_reg_vec(
+ reg_parameters=["v"],
+ parameter_names=hssm.defaults.default_model_config["angle"]["list_params"],
+ )
+
+ angle_loglik_reg = make_likelihood_callable(
+ loglik="angle.onnx",
+ loglik_kind="approx_differentiable",
+ backend="jax",
+ params_is_reg=bool_param_reg,
+ )
+
+ ANGLE_reg = make_distribution(
+ "angle",
+ loglik=angle_loglik_reg,
+ list_params=hssm.defaults.default_model_config["angle"]["list_params"],
+ )
+ return (pymc_reg_data,)
+
+
+@app.cell
+def _(ANGLE, pm, pymc_reg_data):
+ import pytensor.tensor as pt
+
+ with pm.Model(
+ coords={
+ "idx": pymc_reg_data.index,
+ "resp": ["rt", "response"],
+ "features": ["x", "y"],
+ }
+ ) as pymc_model_reg:
+ # Features
+ x_ = pm.Data("x", pymc_reg_data["x"].values, dims="idx")
+ y_ = pm.Data("y", pymc_reg_data["y"].values, dims="idx")
+ # Target
+ obs = pm.Data("obs", pymc_reg_data[["rt", "response"]].values, dims=("idx", "resp"))
+
+ # Priors
+ a_reg = pm.Uniform("a", lower=0.5, upper=2.5)
+ z_reg = pm.Uniform("z", lower=0.01, upper=0.99)
+ t_reg = pm.Uniform("t", lower=0.0, upper=0.6)
+ theta_reg = pm.Uniform("theta", lower=-0.1, upper=1.0)
+ v_Intercept = pm.Uniform("v_Intercept", lower=-3, upper=3)
+ v_betas = pm.Normal("v_beta", mu=[0, 0], sigma=0.5, dims=("features"))
+
+ # Regression equation
+ v = pm.Deterministic(
+ "v", v_Intercept + pt.stack([x_, y_], axis=1) @ v_betas, dims="idx"
+ )
+
+ # Our RV
+ angle_reg = ANGLE(
+ "angle",
+ v=v.squeeze(),
+ a=a_reg,
+ z=z_reg,
+ t=t_reg,
+ theta=theta_reg,
+ observed=obs,
+ dims=("idx", "resp"),
+ )
+ return (pymc_model_reg,)
+
+
+@app.cell
+def _(pm, pymc_model_reg):
+ with pymc_model_reg:
+ idata_pymc_reg = pm.sample(
+ nuts_sampler="numpyro", idata_kwargs={"log_likelihood": True}
+ )
+ return (idata_pymc_reg,)
+
+
+@app.cell
+def _(az, idata_pymc_reg):
+ az.plot_forest(idata_pymc_reg, var_names=["~v"])
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ All layers peeled back, the only limit in your modeling endeavors becomes the limit of the PyMC universe!
+
+
+
+
+
+
+ Enjoy the exploration!
+ """)
+ return
+
+
+@app.cell(hide_code=True)
+def _(mo):
+ mo.md(r"""
+ # End
+ """)
+ return
+
+
+if __name__ == "__main__":
+ app.run()
diff --git a/docs/tutorials/public/ANGLE_pic.png b/docs/tutorials/public/ANGLE_pic.png
new file mode 100644
index 000000000..be84cc81d
Binary files /dev/null and b/docs/tutorials/public/ANGLE_pic.png differ
diff --git a/docs/tutorials/public/ANGLE_with_params_pic.png b/docs/tutorials/public/ANGLE_with_params_pic.png
new file mode 100644
index 000000000..ac42a6e04
Binary files /dev/null and b/docs/tutorials/public/ANGLE_with_params_pic.png differ
diff --git a/docs/tutorials/public/DDM_only_v_pic.png b/docs/tutorials/public/DDM_only_v_pic.png
new file mode 100644
index 000000000..beac829cd
Binary files /dev/null and b/docs/tutorials/public/DDM_only_v_pic.png differ
diff --git a/docs/tutorials/public/DDM_pic.png b/docs/tutorials/public/DDM_pic.png
new file mode 100644
index 000000000..fe1a7f133
Binary files /dev/null and b/docs/tutorials/public/DDM_pic.png differ
diff --git a/docs/tutorials/public/DDM_with_params_pic.png b/docs/tutorials/public/DDM_with_params_pic.png
new file mode 100644
index 000000000..ecfe4bca9
Binary files /dev/null and b/docs/tutorials/public/DDM_with_params_pic.png differ
diff --git a/docs/tutorials/public/HSSM_logo.png b/docs/tutorials/public/HSSM_logo.png
new file mode 100644
index 000000000..7488d8e84
Binary files /dev/null and b/docs/tutorials/public/HSSM_logo.png differ
diff --git a/docs/tutorials/public/arviz.png b/docs/tutorials/public/arviz.png
new file mode 100644
index 000000000..6f04c9ee4
Binary files /dev/null and b/docs/tutorials/public/arviz.png differ
diff --git a/docs/tutorials/public/bambi.png b/docs/tutorials/public/bambi.png
new file mode 100644
index 000000000..69a2af71e
Binary files /dev/null and b/docs/tutorials/public/bambi.png differ
diff --git a/docs/tutorials/public/blackbox.png b/docs/tutorials/public/blackbox.png
new file mode 100644
index 000000000..37c4c27ed
Binary files /dev/null and b/docs/tutorials/public/blackbox.png differ
diff --git a/docs/tutorials/public/hierarchical_modeling.png b/docs/tutorials/public/hierarchical_modeling.png
new file mode 100644
index 000000000..2f20195c5
Binary files /dev/null and b/docs/tutorials/public/hierarchical_modeling.png differ
diff --git a/docs/tutorials/public/onnx.png b/docs/tutorials/public/onnx.png
new file mode 100644
index 000000000..d575d91f4
Binary files /dev/null and b/docs/tutorials/public/onnx.png differ
diff --git a/docs/tutorials/public/pytensor_jax.png b/docs/tutorials/public/pytensor_jax.png
new file mode 100644
index 000000000..68101e227
Binary files /dev/null and b/docs/tutorials/public/pytensor_jax.png differ
diff --git a/pyproject.toml b/pyproject.toml
index 996f7ac35..e2faa8647 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -31,6 +31,7 @@ dependencies = [
"hddm-wfpt>=0.1.6",
"huggingface-hub>=0.34.0",
"jaxonnxruntime>=0.3.0",
+ "marimo>=0.18.3",
"numpyro>=0.19",
"onnx>=1.16.0",
"pandas>=2.2,<3",