GitHub Repository: probml/pyprobml
Path: blob/master/notebooks/book1/04/laplace_approx_beta_binom_pymc3.ipynb

Laplace approximation to the posterior of a Beta-Bernoulli model, using PyMC3
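The Laplace (quadratic) approximation replaces the posterior with a Gaussian centered at the posterior mode. As a reminder (standard material, stated here for context rather than taken from the original notebook): if $\hat{\theta}$ is the MAP estimate and $H$ is the Hessian of the negative log posterior evaluated at $\hat{\theta}$, then

$$p(\theta \mid \mathcal{D}) \approx \mathcal{N}\!\left(\theta \mid \hat{\theta},\, H^{-1}\right).$$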

import jax
import jax.numpy as jnp
from jax import lax
import numpy as np
import matplotlib.pyplot as plt

try:
    from tensorflow_probability.substrates import jax as tfp
except ModuleNotFoundError:
    %pip install -qqq tensorflow_probability
    from tensorflow_probability.substrates import jax as tfp
dist = tfp.distributions

try:
    import pymc3 as pm
except ModuleNotFoundError:
    %pip install -qq pymc3
    import pymc3 as pm

try:
    import scipy.stats as stats
except ModuleNotFoundError:
    %pip install -qq scipy
    import scipy.stats as stats
import scipy.special as sp

try:
    import arviz as az
except ModuleNotFoundError:
    %pip install -qq arviz
    import arviz as az

import math
# Use same data as https://github.com/probml/probml-notebooks/blob/main/notebooks/beta_binom_approx_post_pymc.ipynb
key = jax.random.PRNGKey(128)
dataset = np.repeat([0, 1], (10, 1))
n_samples = len(dataset)
print(f"Dataset: {dataset}")
n_heads = dataset.sum()
n_tails = n_samples - n_heads
WARNING:absl:No GPU/TPU found, falling back to CPU. (Set TF_CPP_MIN_LOG_LEVEL=0 and rerun for more info.)
Dataset: [0 0 0 0 0 0 0 0 0 0 1]
# prior distribution ~ Beta
def prior_dist():
    return dist.Beta(concentration1=1.0, concentration0=1.0)


# likelihood distribution ~ Bernoulli
def likelihood_dist(theta):
    return dist.Bernoulli(probs=theta)
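For reference, the conjugate update used in the next cell (standard algebra, written out here for convenience rather than taken from the original notebook): with $h$ heads, $t$ tails, and a $\mathrm{Beta}(a, b)$ prior,

$$p(\theta \mid \mathcal{D}) \propto \theta^{h}(1-\theta)^{t}\,\theta^{a-1}(1-\theta)^{b-1} \propto \mathrm{Beta}(\theta \mid a + h,\, b + t).$$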
# closed form of beta posterior
a = prior_dist().concentration1
b = prior_dist().concentration0
exact_posterior = dist.Beta(concentration1=a + n_heads, concentration0=b + n_tails)

theta_range = jnp.linspace(0.01, 0.99, 100)

ax = plt.gca()
ax2 = ax.twinx()
(plt2,) = ax2.plot(theta_range, exact_posterior.prob(theta_range), "g--", label="True Posterior")
(plt3,) = ax2.plot(theta_range, prior_dist().prob(theta_range), label="Prior")

likelihood = jax.vmap(lambda x: jnp.prod(likelihood_dist(x).prob(dataset)))(theta_range)
(plt1,) = ax.plot(theta_range, likelihood, "r-.", label="Likelihood")

ax.set_xlabel("theta")
ax.set_ylabel("Likelihood")
ax2.set_ylabel("Prior & Posterior")
ax2.legend(handles=[plt1, plt2, plt3], bbox_to_anchor=(1.6, 1));
[Figure: Bernoulli likelihood, Beta(1, 1) prior, and exact Beta posterior over theta]
# Laplace
with pm.Model() as normal_approximation:
    theta = pm.Beta("theta", 1.0, 1.0)
    y = pm.Binomial("y", n=1, p=theta, observed=dataset)  # Bernoulli
    mean_q = pm.find_MAP()
    std_q = ((1 / pm.find_hessian(mean_q, vars=[theta])) ** 0.5)[0]
    print(theta)
    print(pm.find_hessian(mean_q, vars=[theta]))
loc = mean_q["theta"]
# plt.savefig('bb_laplace.pdf');
theta ~ Beta
[[133.09999067]]
loc, std_q, mean_q
(array(0.09090909), array([0.08667842]), {'theta_logodds__': array(-2.30258505), 'theta': array(0.09090909)})
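For this conjugate model the mode and curvature have closed forms, so the PyMC3 numbers can be sanity-checked by hand. The following cell is a verification sketch added here (it is not part of the original notebook); it reuses a, b, n_heads, and n_tails from the cells above.

# Closed-form check of the Laplace fit (added sketch, not in the original notebook).
# The posterior is Beta(alpha, beta) with alpha = a + n_heads, beta = b + n_tails.
alpha = a + n_heads  # 2.0
beta = b + n_tails  # 11.0
theta_map = (alpha - 1) / (alpha + beta - 2)  # mode of the Beta posterior, ~0.0909
# Hessian of the negative log posterior at the mode:
# d^2/dtheta^2 [-(alpha - 1) log(theta) - (beta - 1) log(1 - theta)]
hess = (alpha - 1) / theta_map**2 + (beta - 1) / (1 - theta_map) ** 2  # ~133.1
print(theta_map, hess, hess**-0.5)  # should match loc, find_hessian, and std_q above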
x = theta_range
plt.figure()
plt.plot(x, stats.norm.pdf(x, loc, std_q), "--", label="Laplace")
post_exact = stats.beta.pdf(x, n_heads + 1, n_tails + 1)
plt.plot(x, post_exact, label="exact")
plt.title("Quadratic approximation")
plt.xlabel("θ", fontsize=14)
plt.yticks([])
plt.legend()
<matplotlib.legend.Legend at 0x7fd94c0bb590>
[Figure: Laplace (quadratic) approximation vs. the exact Beta posterior]
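The Gaussian is centered at the mode rather than the mean, and the exact Beta(2, 11) posterior is right-skewed, so the two summaries disagree slightly. A quick numerical comparison (an added sketch, not in the original notebook; it reuses loc, std_q, n_heads, n_tails, and n_samples from above):

# Compare posterior mean and sd under the exact Beta posterior vs the Laplace fit
print(f"exact mean {(n_heads + 1) / (n_samples + 2):.3f} vs Laplace mean {float(loc):.3f}")
print(f"exact sd   {stats.beta.std(n_heads + 1, n_tails + 1):.3f} vs Laplace sd   {float(std_q[0]):.3f}")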