How to use the pymc3.Gamma function in pymc3

To help you get started, we’ve selected a few pymc3.Gamma examples, based on popular ways it is used in public projects.
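For orientation: pm.Gamma is parametrized by shape alpha and rate beta, with mean alpha/beta and standard deviation sqrt(alpha)/beta; PyMC3 also accepts a mu/sd pair and converts internally. A minimal sketch (variable names are illustrative):

import pymc3 as pm

with pm.Model():
    # shape/rate parametrization: mean = 1/0.1 = 10, sd = sqrt(1)/0.1 = 10
    lam = pm.Gamma('lam', alpha=1.0, beta=0.1)
    # equivalent mean/sd parametrization
    lam_alt = pm.Gamma('lam_alt', mu=10.0, sd=10.0)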


github aloctavodia / Doing_bayesian_data_analysis / 09_BernBetaMuKappaPyMC.py
# N (per-coin flip counts) and z (per-coin head counts) are defined earlier in the source file
coin = []  # list/vector index for each coin (from 0 to number of coins)
y = []  # list/vector with heads (1) or tails (0) for each flip
for i, flips in enumerate(N):
    heads = z[i]
    if heads > flips:
        sys.exit("The number of heads can't be greater than the number of flips")
    else:
        y = y + [1] * heads + [0] * (flips-heads)
        coin = coin + [i] * flips


# Specify the model in PyMC
with pm.Model() as model:
    # define the hyperparameters
    mu = pm.Beta('mu', 2, 2)
    kappa = pm.Gamma('kappa', 1, 0.1)
    # define the prior
    theta = pm.Beta('theta', mu * kappa, (1 - mu) * kappa, shape=len(N))
    # define the likelihood
    y = pm.Bernoulli('y', p=theta[coin], observed=y)

    # Generate an MCMC chain
    trace = pm.sample(1000, progressbar=False)


## Check the results.

## Print summary for each trace
#pm.df_summary(trace)
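In the snippet above, the Beta prior on theta is re-parametrized by its mean mu and concentration kappa: a = mu * kappa and b = (1 - mu) * kappa give a Beta with mean mu, and larger kappa pulls the per-coin theta values closer to mu. A quick check with SciPy (values are illustrative):

from scipy import stats

mu, kappa = 0.7, 20.0
a, b = mu * kappa, (1 - mu) * kappa
print(stats.beta(a, b).mean())  # 0.7, the chosen mean
print(stats.beta(a, b).std())   # sqrt(mu*(1-mu)/(kappa+1)), shrinks as kappa grows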
github aloctavodia / Doing_bayesian_data_analysis / 09_FilconPyMC_ex9.2.B.py
z = np.array([  # per-subject correct-response counts; earlier values truncated in this excerpt
38, 43, 58, 60, 44, 44, 32, 56, 43, 36, 38, 48, 32, 40, 40, 34, 45, 42, 41, 32,
48, 36, 29, 37, 53, 55, 50, 47, 46, 44, 50, 56, 58, 42, 58, 54, 57, 54, 51, 49,
52, 51, 49, 51, 46, 46, 42, 49, 46, 56, 42, 53, 55, 51, 55, 49, 53, 55, 40, 46,
56, 47, 54, 54, 42, 34, 35, 41, 48, 46, 39, 55, 30, 49, 27, 51, 41, 36, 45, 41,
53, 32, 43, 33])
condition = np.repeat([0,1,2,3], nSubj)

# Specify the model in PyMC
with pm.Model() as model:
    # define the hyper-hyperparameters for kappa
    mean_gamma = pm.Uniform('mean_gamma', 0, 30)
    sd_gamma = pm.Uniform('sd_gamma', 0, 30)
    s_kappa = mean_gamma**2/sd_gamma**2
    r_kappa = mean_gamma/sd_gamma**2
    # define the hyperparameters
    kappa = pm.Gamma('kappa', s_kappa, r_kappa)
    mu = pm.Beta('mu', 1, 1, shape=ncond)
    # define the prior
    theta = pm.Beta('theta', mu[condition] * kappa, (1 - mu[condition]) * kappa, shape=len(z))
    # define the likelihood
    y = pm.Binomial('y', p=theta, n=N, observed=z)
    trace = pm.sample(2000, tune=1000)

## Check the results.
burnin = 0  # posterior samples to discard

## Print summary for each trace
#pm.df_summary(trace[burnin:])
#pm.df_summary(trace)

## Check for mixing and autocorrelation
#pm.autocorrplot(trace, varnames=['mu', 'kappa'])
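The s_kappa/r_kappa lines moment-match a Gamma: for Gamma(alpha, beta) with rate beta, mean = alpha/beta and variance = alpha/beta**2, so alpha = mean**2/sd**2 and beta = mean/sd**2. A small sanity check (helper name is mine):

def gamma_shape_rate(mean, sd):
    # convert a desired mean/sd into Gamma shape (alpha) and rate (beta)
    return mean**2 / sd**2, mean / sd**2

alpha, beta = gamma_shape_rate(10.0, 5.0)
print(alpha / beta, (alpha / beta**2) ** 0.5)  # recovers 10.0 and 5.0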
github pymc-devs / pymc3 / benchmarks / benchmarks / benchmarks.py
import numpy as np
import pymc3 as pm
import theano.tensor as tt


def mixture_model(random_seed=1234):
    """Sample mixture model to use in benchmarks"""
    np.random.seed(random_seed)  # use the argument rather than a hard-coded seed
    size = 1000
    w_true = np.array([0.35, 0.4, 0.25])
    mu_true = np.array([0., 2., 5.])
    sigma = np.array([0.5, 0.5, 1.])
    component = np.random.choice(mu_true.size, size=size, p=w_true)
    x = np.random.normal(mu_true[component], sigma[component], size=size)

    with pm.Model() as model:
        w = pm.Dirichlet('w', a=np.ones_like(w_true))
        mu = pm.Normal('mu', mu=0., sd=10., shape=w_true.shape)
        enforce_order = pm.Potential('enforce_order', tt.switch(mu[0] - mu[1] <= 0, 0., -np.inf) +
                                                      tt.switch(mu[1] - mu[2] <= 0, 0., -np.inf))
        tau = pm.Gamma('tau', alpha=1., beta=1., shape=w_true.shape)
        pm.NormalMixture('x_obs', w=w, mu=mu, tau=tau, observed=x)

    # Initialization can be poorly specified, this is a hack to make it work
    start = {
        'mu': mu_true.copy(),
        'tau_log__': np.log(1. / sigma**2),
        'w_stickbreaking__': np.array([-0.03,  0.44])
    }
    return model, start
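A minimal usage sketch for the benchmark helper above (sampler settings are illustrative); the start dictionary works around the poorly specified default initialization of the transformed variables:

model, start = mixture_model()
with model:
    trace = pm.sample(500, start=start, progressbar=False)
print(trace['mu'].mean(axis=0))  # posterior means of the ordered components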
github pymc-learn / pymc-learn / pmlearn / gaussian_process / gpr.py
"""
        model_input = theano.shared(np.zeros([self.num_training_samples,
                                              self.num_pred]))

        model_output = theano.shared(np.zeros(self.num_training_samples))

        self.shared_vars = {
            'model_input': model_input,
            'model_output': model_output,
        }

        self.gp = None
        model = pm.Model()

        with model:
            length_scale = pm.Gamma('length_scale', alpha=2, beta=0.5,
                                    shape=(1, self.num_pred))
            signal_variance = pm.HalfCauchy('signal_variance', beta=2,
                                            shape=1)
            noise_variance = pm.HalfCauchy('noise_variance', beta=2,
                                           shape=1)
            degrees_of_freedom = pm.Gamma('degrees_of_freedom', alpha=2,
                                          beta=0.1, shape=1)

            if self.kernel is None:
                cov_function = signal_variance ** 2 * RBF(
                    input_dim=self.num_pred,
                    ls=length_scale)
            else:
                cov_function = self.kernel

            if self.prior_mean is None:
github gift-surg / NiftyMIC / niftymic / utilities / robust_motion_estimator.py
import numpy as np
import pymc3


def _run_robust_gaussian_process_regression_map(x, y):
        x = np.array(x)
        with pymc3.Model() as model:
            ell = pymc3.Gamma("ell", alpha=2, beta=1)
            eta = pymc3.HalfCauchy("eta", beta=5)

            cov = eta**2 * pymc3.gp.cov.Matern52(1, ell)
            gp = pymc3.gp.Latent(cov_func=cov)

            f = gp.prior("f", X=x.reshape(-1, 1))

            sigma = pymc3.HalfCauchy("sigma", beta=2)
            # sigma = pymc3.Normal("sigma")
            # sigma = 0.1
            nu = pymc3.Gamma("nu", alpha=2, beta=1)
            # sigma = 0.01
            # nu = 0.01
            # y_ = pymc3.StudentT("y", mu=f, lam=1.0/sigma, nu=nu, observed=y)
            y_ = pymc3.StudentT("y", mu=f, sd=sigma, nu=nu, observed=y)

            # res = pymc3.find_MAP(model=model, method="L-BFGS-B")
            res = pymc3.find_MAP(vars=[f], method="L-BFGS-B")
            return res["f"]
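A hypothetical call to the helper above on noisy 1-D data; the Student-t likelihood with a Gamma-distributed nu is what makes the fit robust to the injected outlier:

import numpy as np

x = np.linspace(0, 1, 40)
y = np.sin(2 * np.pi * x) + 0.1 * np.random.randn(40)
y[10] += 3.0  # an outlier the heavy-tailed likelihood can downweight
f_map = _run_robust_gaussian_process_regression_map(x, y)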
github luke14free / pm-prophet / pmprophet / model.py
                    self.priors["regressors"] = pm.Laplace(
                        "regressors_%s" % self.name,
                        0,
                        self.regressors_prior_scale,
                        shape=len(self.regressors),
                    )
            if self.growth and "growth" not in self.priors:
                self.priors["growth"] = pm.Normal("growth_%s" % self.name, 0, 0.1)
            if len(self.changepoints) and "changepoints" not in self.priors:
                if self.auto_changepoints:
                    k = self.n_changepoints
                    alpha = pm.Gamma("alpha", 1.0, 1.0)
                    beta = pm.Beta("beta", 1.0, alpha, shape=k)
                    w1 = pm.Deterministic(
                        "w1",
                        tt.concatenate([[1], tt.extra_ops.cumprod(1 - beta)[:-1]])
                        * beta,
                    )
                    w, _ = theano.map(
                        fn=lambda x: tt.switch(tt.gt(x, 1e-4), x, 0), sequences=[w1]
                    )
                    self.w = pm.Deterministic("w", w)
                else:
                    k = len(self.changepoints)
                    w = 1
                cgpt = pm.Deterministic(
                    "cgpt",
                    pm.Laplace("cgpt_inner", 0, self.changepoints_prior_scale, shape=k)
github aloctavodia / Doing_bayesian_data_analysis / 12_OneOddGroupModelComp.py
                binom.rvs(n=ntrl, p=.49, size=npg),
                binom.rvs(n=ntrl, p=.51, size=npg)))

n_subj = len(cond_of_subj)
n_cond = len(set(cond_of_subj))


# THE MODEL
with pm.Model() as model:
    # Hyperprior on model index:
    model_index = pm.DiscreteUniform('model_index', lower=0, upper=1)
    # Constants for hyperprior:
    shape_Gamma = 1.0
    rate_Gamma = 0.1
    # Hyperprior on mu and kappa:
    kappa = pm.Gamma('kappa', shape_Gamma, rate_Gamma, shape=n_cond)

    mu0 = pm.Beta('mu0', 1, 1)
    a_Beta0 = mu0 * kappa[cond_of_subj]
    b_Beta0 = (1 - mu0) * kappa[cond_of_subj]

    mu1 = pm.Beta('mu1', 1, 1, shape=n_cond)
    a_Beta1 = mu1[cond_of_subj] * kappa[cond_of_subj]
    b_Beta1 = (1 - mu1[cond_of_subj]) * kappa[cond_of_subj]

    # Prior on theta
    theta0 = pm.Beta('theta0', a_Beta0, b_Beta0, shape=n_subj)
    theta1 = pm.Beta('theta1', a_Beta1, b_Beta1, shape=n_subj)
    # if model_index == 0 then sample from theta1 else sample from theta0
    theta = pm.math.switch(pm.math.eq(model_index, 0), theta1, theta0)

    # Likelihood:
github pymc-learn / pymc-learn / pmlearn / mixture / dirichlet_process.py

        def stick_breaking(v):
            portion_remaining = tt.concatenate(
                [[1], tt.extra_ops.cumprod(1 - v)[:-1]])
            return v * portion_remaining

        model = pm.Model()

        with model:

            K = self.num_truncate
            D = self.num_pred

            alpha = pm.Gamma('alpha', 1.0, 1.0)
            v = pm.Beta('v', 1, alpha, shape=K)
            pi_ = stick_breaking(v)
            pi = pm.Deterministic('pi', pi_/pi_.sum())

            means = tt.stack([pm.Uniform('cluster_center_{}'.format(k),
                                        lower=0.,
                                        upper=10.,
                                        shape=D) for k in range(K)])

            lower = tt.stack([pm.LKJCholeskyCov(
                'cluster_variance_{}'.format(k),
                n=D,
                eta=2.,
                sd_dist=pm.HalfNormal.dist(sd=1.)) for k in range(K)])

            chol = tt.stack([pm.expand_packed_triangular(
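The excerpt cuts off above, but the stick_breaking helper it defines is self-contained: each Beta draw v_k claims a fraction of the stick left over by earlier breaks, yielding non-negative weights that approach a sum of 1 as the truncation level K grows. A quick NumPy check with illustrative values:

import numpy as np

def stick_breaking_np(v):
    portion_remaining = np.concatenate([[1], np.cumprod(1 - v)[:-1]])
    return v * portion_remaining

w = stick_breaking_np(np.array([0.5, 0.5, 0.5]))
print(w, w.sum())  # [0.5 0.25 0.125], sum 0.875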
github aloctavodia / Doing_bayesian_data_analysis / 18_ANOVAonewayNonhomogvarBrugs.py
            cmpVec[g1idx] = -1
            cmpVec[g2idx] = 1
            contrast_dict = (contrast_dict, cmpVec)


z = (y - np.mean(y))/np.std(y)


## THE MODEL.
with pm.Model() as model:
    # define the hyperpriors
    a_SD_unabs = pm.StudentT('a_SD_unabs', mu=0, lam=0.001, nu=1)
    a_SD = abs(a_SD_unabs) + 0.1
    atau = 1 / a_SD**2
    m = pm.Gamma('m', 1, 1)
    d = pm.Gamma('d', 1, 1)
    sG = m**2 / d**2 
    rG = m / d**2 
    # define the priors
    tau = pm.Gamma('tau', sG, rG) 
    a0 = pm.Normal('a0', mu=0, tau=0.001) # y values are assumed to be standardized
    a = pm.Normal('a', mu=0, tau=atau, shape=NxLvl)
    
    b = pm.Deterministic('b', a - tt.mean(a))
    mu = a0 + b[x]
    # define the likelihood
    yl = pm.Normal('yl', mu=mu, tau=tau, observed=z)
    # Generate a MCMC chain
    trace = pm.sample(2000)


# EXAMINE THE RESULTS
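One detail worth noting in the model above: pm.Gamma is placed on the precision tau, and pm.Normal consumes tau directly. If you would rather report a standard deviation, a hedged equivalent (the sd name is mine) is:

import pymc3 as pm

with pm.Model():
    tau = pm.Gamma('tau', 1, 1)             # Gamma prior on the precision
    sd = pm.Deterministic('sd', tau**-0.5)  # sd = 1/sqrt(tau), for reporting
    yl = pm.Normal('yl', mu=0.0, tau=tau)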