How to use the pymc3.Normal distribution in pymc3

To help you get started, we've selected a few pymc3.Normal examples based on popular ways it is used in public projects.

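Before the project snippets, here is a minimal, self-contained sketch of the two roles pm.Normal plays throughout them: as a prior (no observed argument) and as a likelihood (conditioned on data via observed). All names and values below are illustrative, not taken from any of the projects.

import numpy as np
import pymc3 as pm

# Toy data: 100 draws from a Normal(1, 2)
data = 1.0 + 2.0 * np.random.randn(100)

with pm.Model() as model:
    # Priors: without `observed`, pm.Normal creates a free random variable
    mu = pm.Normal('mu', mu=0.0, sd=10.0)
    sigma = pm.HalfNormal('sigma', sd=5.0)
    # Likelihood: `observed` conditions this Normal on the data
    pm.Normal('obs', mu=mu, sd=sigma, observed=data)
    trace = pm.sample(1000)

The examples below apply this same pattern at larger scale: hierarchical priors, custom likelihoods, Gaussian process priors, and variational fits.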

github SimonOuellette35 / BayesianRL / rl_agent.py
        plt.scatter(s_short, r_short, color='red')
        plt.scatter(s_long, r_long, color='green')
        plt.scatter(s_flat, r_flat, color='black')
        plt.title("State vs Reward scatter plot")
        plt.show()

        # qvalues is an [N x 3] array:
        #   column 0 - action index
        #   column 1 - state index
        #   column 2 - reward
        qvalues = np.array(full_tensor)

        with pm.Model() as self.model:

            Qmus = pm.Normal('Qmus', mu=self.Qmus_estimates_mu, sd=self.Qmus_estimates_sd, shape=[3, self.D])
            Qsds = pm.Normal('Qsds', mu=self.Qsds_estimates_mu, sd=self.Qsds_estimates_sd, shape=[3, self.D])

            idx0 = qvalues[:, 0].astype(int)
            idx1 = qvalues[:, 1].astype(int)

            # Qsds are log standard deviations; pm.math.exp keeps the transform symbolic
            pm.Normal('likelihood', mu=Qmus[idx0, idx1], sd=pm.math.exp(Qsds[idx0, idx1]), observed=qvalues[:, 2])

            mean_field = pm.fit(n=15000, method='advi', obj_optimizer=pm.adam(learning_rate=0.1))
            self.trace = mean_field.sample(5000)

        # Posterior summaries of the Q-value estimates
        self.Qmus_estimates = np.mean(self.trace['Qmus'], axis=0)
        self.Qsds_estimates = np.mean(self.trace['Qsds'], axis=0)

        # The posterior becomes the prior for the next round of updating
        self.Qmus_estimates_mu = self.Qmus_estimates
        self.Qmus_estimates_sd = np.std(self.trace['Qmus'], axis=0)

        self.Qsds_estimates_mu = self.Qsds_estimates
github parsing-science / ps-toolkit / ps_toolkit / pymc3_models / HLR.py
        self.shared_vars = {
            'model_input': model_input,
            'model_output': model_output,
            'model_cats': model_cats
        }

        model = pm.Model()

        with model:
            mu_alpha = pm.Normal('mu_alpha', mu=0, sd=100)
            sigma_alpha = pm.HalfNormal('sigma_alpha', sd=100)

            mu_beta = pm.Normal('mu_beta', mu=0, sd=100)
            sigma_beta = pm.HalfNormal('sigma_beta', sd=100)

            alpha = pm.Normal('alpha', mu=mu_alpha, sd=sigma_alpha, shape=(self.num_cats,))
            beta = pm.Normal('beta', mu=mu_beta, sd=sigma_beta, shape=(self.num_cats, self.num_pred))

            c = model_cats

            # T is theano.tensor; sum over predictors for each observation
            temp = alpha[c] + T.sum(beta[c] * model_input, 1)

            p = pm.invlogit(temp)

            o = pm.Bernoulli('o', p, observed=model_output)

        return model
github arviz-devs / arviz / examples / energyplot.py
"""
Energy Plot
===========

_thumb: .7, .5
"""
import arviz as az
import numpy as np
import pymc3 as pm

az.style.use('arviz-darkgrid')

# Data of the Eight Schools Model
J = 8
y = np.array([28.,  8., -3.,  7., -1.,  1., 18., 12.])
sigma = np.array([15., 10., 16., 11.,  9., 11., 10., 18.])


with pm.Model('Centered Eight Schools') as centered_eight:
    mu = pm.Normal('mu', mu=0, sd=5)
    tau = pm.HalfCauchy('tau', beta=5)
    theta = pm.Normal('theta', mu=mu, sd=tau, shape=J)
    obs = pm.Normal('obs', mu=theta, sd=sigma, observed=y)
    centered_eight_trace = pm.sample()

az.energyplot(centered_eight_trace, figsize=(12, 8))
github pymc-devs / pymc3 / pymc3 / gp / gp.py
def _build_prior(self, name, X, reparameterize=True, **kwargs):
        mu = self.mean_func(X)
        cov = stabilize(self.cov_func(X))
        shape = infer_shape(X, kwargs.pop("shape", None))
        if reparameterize:
            # Non-centered parameterization: draw v ~ N(0, 1) and set
            # f = mu + L v, with L the Cholesky factor of the covariance
            v = pm.Normal(name + "_rotated_", mu=0.0, sd=1.0, shape=shape, **kwargs)
            f = pm.Deterministic(name, mu + cholesky(cov).dot(v))
        else:
            f = pm.MvNormal(name, mu=mu, cov=cov, shape=shape, **kwargs)
        return f
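The reparameterize branch above is the non-centered parameterization: instead of sampling f directly from MvNormal(mu, cov), it samples a standard-normal v and transforms it (stabilize, infer_shape and cholesky are helpers from pymc3.gp.util). A minimal standalone sketch of the same trick, with a toy covariance and illustrative names:

import numpy as np
import pymc3 as pm
import theano.tensor as tt
from theano.tensor.slinalg import cholesky

X = np.linspace(0, 1, 20)[:, None]
# Toy GP covariance with a small jitter term for numerical stability
K = pm.gp.cov.ExpQuad(1, ls=0.2)(X) + 1e-6 * tt.eye(20)

with pm.Model():
    v = pm.Normal('v', mu=0.0, sd=1.0, shape=20)
    # f = L v is distributed as MvNormal(0, K), but often samples much better
    f = pm.Deterministic('f', tt.dot(cholesky(K), v))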
github aloctavodia / Doing_bayesian_data_analysis / 19_ANOVAtwowayPyMC.py
    a1_SD_unabs = pm.StudentT('a1_SD_unabs', mu=0, lam=0.001, nu=1)
    a1_SD = abs(a1_SD_unabs) + 0.1
    a1tau = 1 / a1_SD**2

    a2_SD_unabs = pm.StudentT('a2_SD_unabs', mu=0, lam=0.001, nu=1)
    a2_SD = abs(a2_SD_unabs) + 0.1
    a2tau = 1 / a2_SD**2
    
    a1a2_SD_unabs = pm.StudentT('a1a2_SD_unabs', mu=0, lam=0.001, nu=1)
    a1a2_SD = abs(a1a2_SD_unabs) + 0.1
    a1a2tau = 1 / a1a2_SD**2


    # define the priors
    sigma = pm.Uniform('sigma', 0, 10) # y values are assumed to be standardized
    tau = 1 / sigma**2
    
    a0 = pm.Normal('a0', mu=0, tau=0.001) # y values are assumed to be standardized
   
    a1 = pm.Normal('a1', mu=0 , tau=a1tau, shape=Nx1Lvl)
    a2 = pm.Normal('a2', mu=0 , tau=a2tau, shape=Nx2Lvl)
    a1a2 = pm.Normal('a1a2', mu=0 , tau=a1a2tau, shape=[Nx1Lvl, Nx2Lvl])

    b1 = pm.Deterministic('b1', a1 - tt.mean(a1))
    b2 = pm.Deterministic('b2', a2 - tt.mean(a2))
    b1b2 = pm.Deterministic('b1b2', a1a2 - tt.mean(a1a2))
    
    mu = a0 + b1[x1] + b2[x2] + b1b2[x1, x2]
 
    # define the likelihood
    yl = pm.Normal('yl', mu=mu, tau=tau, observed=z)

    # Generate a MCMC chain
    trace = pm.sample(2000)
github cshenton / normal-comparison / experiments / pymc_classes.py
def __init__(self):
        self.is_fit = False

        with pm.Model() as self.model:
            self.mu = pm.Normal('mu', mu=0.0, sd=5.0)
            self.inv_softplus_sigma = pm.Normal(
                'inv_softplus_sigma', mu=0.0, sd=1.0)
github SimonOuellette35 / PyData-talk---Intro-to-PyMC3 / Correlations.py
    def custom_likelihood(x_diffs, y_obs_last, y_obs):
        # corr is a random variable defined in the enclosing model scope.
        # Model is: y(t) = y(t-1) + correlation * [x(t) - x(t-1)]
        expected = y_obs_last + corr * x_diffs
        return pm.Normal.dist(mu=expected, sd=0.01).logp(y_obs)
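pm.Normal.dist(...) builds a free-standing distribution object that is not registered with any model, so its logp can be evaluated inside a custom likelihood function like the one above. One common way to attach such a function to a model is pm.DensityDist; the sketch below assumes that wiring, with toy data and illustrative names (corr, x_diffs and y_obs_last are picked up from the enclosing scope):

import numpy as np
import pymc3 as pm

# Toy series where y tracks x's increments with coefficient 0.8
x = np.cumsum(np.random.randn(100))
y = 0.8 * x + 0.01 * np.random.randn(100)

x_diffs = x[1:] - x[:-1]
y_obs_last = y[:-1]

with pm.Model():
    corr = pm.Normal('corr', mu=0.0, sd=1.0)

    def custom_likelihood(y_obs):
        expected = y_obs_last + corr * x_diffs
        return pm.Normal.dist(mu=expected, sd=0.01).logp(y_obs)

    pm.DensityDist('y', custom_likelihood, observed=y[1:])
    trace = pm.sample(1000)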
github tcassou / causal_impact / lib / bsts.py
        :return: None
        """
        # Model, Normal, HalfNormal, GaussianRandomWalk and sample come from pymc3
        with Model() as self.model:
            # Variance of y
            sigma_eps = HalfNormal('sigma_eps', sd=1.)
            # Regression coefs
            alpha = Normal('alpha', mu=0, sd=10)
            beta = Normal('beta', mu=0, sd=10, shape=features.shape[1])
            reg = alpha + sum(beta[i] * features.iloc[:, i] for i in range(features.shape[1]))
            # Trend
            sigma_u = HalfNormal('sigma_u', sd=1.)
            sigma_v = HalfNormal('sigma_v', sd=1.)
            # The second positional argument of GaussianRandomWalk is the precision tau
            delta = GaussianRandomWalk('delta', sigma_v ** -2, shape=observed.shape[0] - 1)
            trend = GaussianRandomWalk('trend', sigma_u ** -2, mu=delta, shape=observed.shape[0])
            # Observation
            Normal('y', mu=trend + reg, sd=sigma_eps, observed=observed)

        with self.model:
            self.trace = sample(
                n_samples,
                progressbar=True,
            )
github rodluger / starry / exoplanet_doppler.py
        inc,
        obl,
        veq, 
        alpha,
        theta,
        _x,
        _y,
        _z, 
        rs
    )

    # Here we track the value of the model light curve for plotting later
    pm.Deterministic("rv_model", rv_model)

    # The likelihood function assuming known Gaussian uncertainty
    pm.Normal("obs", mu=rv_model, sd=truths["rv_err"], observed=rv)

    # Fit for the maximum a posteriori parameters
    map_soln = xo.optimize(start=model.test_point)
github pymc-devs / pymc3 / pymc3 / examples / wishart.py
# Covariance matrix we want to recover
covariance = np.array([[2, .5, -.5],
                       [.5, 1.,  0.],
                       [-.5, 0., 0.5]])

prec = np.linalg.inv(covariance)

mean = [.5, 1, .2]
data = scipy.stats.multivariate_normal(mean, covariance).rvs(5000)

plt.scatter(data[:, 0], data[:, 1])

with pm.Model() as model:
    S = np.eye(3)
    nu = 5
    mu = pm.Normal('mu', mu=0, sd=1, shape=3)

    # Use the transformed Wishart distribution
    # Under the hood this will do a Cholesky decomposition
    # of S and add two RVs to the sampler: c and z
    prec = pm.WishartBartlett('prec', S, nu)

    # To be able to compare it to truth, convert precision to covariance
    cov = pm.Deterministic('cov', tt.nlinalg.matrix_inverse(prec))

    lp = pm.MvNormal('likelihood', mu=mu, tau=prec, observed=data)

    start = pm.find_MAP()
    step = pm.NUTS(scaling=start)


def run(n=3000):