How to use the pymc3.HalfNormal function in pymc3

To help you get started, we’ve selected a few pymc3.HalfNormal examples, based on popular ways the function is used in public projects.

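HalfNormal defines a distribution over non-negative values, so it is most often used as a prior for scale parameters such as observation noise. A minimal sketch of the pattern the examples below share (the data and variable names are illustrative, not from any of the projects listed):

import numpy as np
import pymc3 as pm

data = np.random.randn(100)  # illustrative observations

with pm.Model():
    # half-normal prior keeps the noise scale non-negative
    sigma = pm.HalfNormal('sigma', sd=1.)
    mu = pm.Normal('mu', mu=0., sd=10.)
    pm.Normal('y', mu=mu, sd=sigma, observed=data)
    trace = pm.sample(1000)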

github tcassou / causal_impact / lib / bsts.py
    # Assumes: from pymc3 import Model, Normal, HalfNormal, GaussianRandomWalk, sample
    def fit(self, features, observed, n_samples):
        """Fit model to data.

        :param pandas.DataFrame features: time series to use as control set
        :param pandas.Series observed: time series to be modeled
        :param int n_samples: number of samples in MCMC

        :return: None
        """
        with Model() as self.model:
            # Variance of y
            sigma_eps = HalfNormal('sigma_eps', sd=1.)
            # Regression coefs
            alpha = Normal('alpha', mu=0, sd=10)
            beta = Normal('beta', mu=0, sd=10, shape=features.shape[1])
            reg = alpha + sum(beta[i] * features.iloc[:, i] for i in range(features.shape[1]))
            # Trend
            sigma_u = HalfNormal('sigma_u', sd=1.)
            sigma_v = HalfNormal('sigma_v', sd=1.)
            delta = GaussianRandomWalk('delta', sigma_v ** -2, shape=observed.shape[0] - 1)
            trend = GaussianRandomWalk('trend', sigma_u ** -2, mu=delta, shape=observed.shape[0])
            # Observation
            Normal('y', mu=trend + reg, sd=sigma_eps, observed=observed)

        with self.model:
            self.trace = sample(
                n_samples,
                progressbar=True,
            )
github parsing-science / ps-toolkit / ps_toolkit / pymc3_models / HLR.py
        # model_input (a theano.shared array of predictors) is created just above this excerpt
        model_output = theano.shared(np.zeros(1))

        model_cats = theano.shared(np.zeros(1, dtype='int'))

        self.shared_vars = {
            'model_input': model_input,
            'model_output': model_output,
            'model_cats': model_cats
        }

        model = pm.Model()

        with model:
            mu_alpha = pm.Normal('mu_alpha', mu=0, sd=100)
            sigma_alpha = pm.HalfNormal('sigma_alpha', sd=100)

            mu_beta = pm.Normal('mu_beta', mu=0, sd=100)
            sigma_beta = pm.HalfNormal('sigma_beta', sd=100)

            alpha = pm.Normal('alpha', mu=mu_alpha, sd=sigma_alpha, shape=(self.num_cats,))
            beta = pm.Normal('beta', mu=mu_beta, sd=sigma_beta, shape=(self.num_cats, self.num_pred))

            c = model_cats

            temp = alpha[c] + T.sum(beta[c] * model_input, 1)

            p = pm.invlogit(temp)

            o = pm.Bernoulli('o', p, observed=model_output)

        return model
github pymc-devs / pymc3 / pymc3 / examples / arma_example.py
import numpy as np
import pymc3 as pm
from theano import scan, shared


def build_model():
    y = shared(np.array([15, 10, 16, 11, 9, 11, 10, 18], dtype=np.float32))
    with pm.Model() as arma_model:
        sigma = pm.HalfNormal('sigma', 5.)
        theta = pm.Normal('theta', 0., sigma=1.)
        phi = pm.Normal('phi', 0., sigma=2.)
        mu = pm.Normal('mu', 0., sigma=10.)

        err0 = y[0] - (mu + phi * mu)

        def calc_next(last_y, this_y, err, mu, phi, theta):
            nu_t = mu + phi * last_y + theta * err
            return this_y - nu_t

        err, _ = scan(fn=calc_next,
                      sequences=dict(input=y, taps=[-1, 0]),
                      outputs_info=[err0],
                      non_sequences=[mu, phi, theta])

        pm.Potential('like', pm.Normal.dist(0, sigma=sigma).logp(err))

    return arma_model
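
A minimal sketch of how the returned model might be sampled (the draw count is illustrative):

arma_model = build_model()
with arma_model:
    trace = pm.sample(1000)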
github nonabelian / dragon_kings / lppl_pymc.py
        # Setting initial points to basinhopping results (o_b, m_b, A_b, C_b, t_b,
        # xd and yd come from earlier in the script; this excerpt continues inside
        # a `with pm.Model() as model:` block)
        o = pm.Normal('o', mu=o_b, sd=10)
        m = pm.Normal('m', mu=m_b, sd=10)
        A = pm.Normal('A', mu=A_b, sd=10)
        C = pm.Normal('C', mu=C_b, sd=10)
        t = pm.Normal('t', mu=t_b, sd=10)

        # Or try with curve fit initial points
        # o = pm.Normal('o', mu=21.0, sd=10)
        # m = pm.Normal('m', mu=md, sd=10)
        # A = pm.Normal('A', mu=Ad, sd=10)
        # C = pm.Normal('C', mu=Cd, sd=10)
        # t = pm.Normal('t', mu=td, sd=10)

        y_mu = A + t**m * xd**m + C * t**m * xd**m * np.cos(o * np.log(xd))
        y_sd = pm.HalfNormal('y_sd', sd=10)

        y_obs = pm.Normal('y_obs', mu=y_mu, sd=y_sd, observed=yd)

        trace = pm.sample(tune=1000)

    # sample_ppc was renamed sample_posterior_predictive in later pymc3 releases
    ppc = pm.sample_ppc(trace, model=model)
    y_model = ppc['y_obs']

    fig = plt.figure(figsize=(12, 8))
    ax = fig.add_subplot(111)
    for row in y_model:
        ax.scatter(xd, row, color='blue', alpha=0.01)

    ax.scatter(xd, yd, color='black', alpha=0.5)

    fname = "lppl_basinhopping_plus_pymc3_fit.png"
github alan-turing-institute / skpro / skpro / pymc / estimators.py
import pymc3 as pm


def _default_estimation_model(X, y):
    with pm.Model() as model:
        alpha = pm.Normal("alpha", mu=y.mean(), sd=10)
        betas = pm.Normal("betas", mu=0, sd=10, shape=X.shape[1])
        sigma = pm.HalfNormal("sigma", sd=10)

        mu = alpha + pm.math.dot(betas, X.T)
        likelihood = pm.Normal("likelihood", mu=mu, sd=sigma, observed=y)

        step = pm.NUTS()
        trace = pm.sample(1000, step)

    return trace
github jakdot / pyactr / tutorials / u8_estimating_using_pymc3.py
# counting, actr, map_estimate, model and Y are defined earlier in the script (excerpt)
counting.goal.add(actr.chunkstring(string="isa countFrom start 2 end 4"))
counting.model_parameters["rule_firing"] = math.exp(map_estimate['rule_firing_log_'])
counting.model_parameters["latency_factor"] = math.exp(map_estimate['lf_log_'])
sim = counting.simulation(trace=True)
sim.run()

print("Search for parameters using Slice/Metropolis.")

basic_model = Model()

with basic_model:

    # Priors for unknown model parameters
    rule_firing = HalfNormal('rule_firing', sd=2, testval=abs(np.random.randn(1)[0]))
    lf = HalfNormal('lf', sd=2, testval=abs(np.random.randn(1)[0]))

    sigma = HalfNormal('sigma', sd=1)

    # You can print searched values after every iteration
    lf_print = T.printing.Print('lf')(lf)
    # Deterministic value, found in the model
    mu = model(rule_firing, lf_print)

    # Deterministic value, found in the model (variant without printing)
    #mu = model(rule_firing, lf)

    # Likelihood (sampling distribution) of observations
    Normal('Y_obs', mu=mu, sd=sigma, observed=Y)

    # Slice should be used for continuous variables, but it sometimes gets stuck - you can also use Metropolis
    step = Metropolis(basic_model.vars)
github alan-turing-institute / skpro / skpro / pymc / estimators.py
import pymc3 as pm


def _default_predictive_model(y):
    with pm.Model() as model:
        mu = pm.Normal("mu", mu=y.mean(), sd=1)
        sd = pm.HalfNormal("sd", sd=1)
        y_pred = pm.Normal("y_pred", mu=mu, sd=sd, observed=y)

    return model
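
A minimal usage sketch (the data and draw count are illustrative):

import numpy as np

y = np.random.randn(50)
with _default_predictive_model(y):
    trace = pm.sample(500)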
github probml / pyprobml / Old / pymc3 / pymc3_linreg_demo.py
import numpy as np
import scipy.optimize as opt
from pymc3 import Model, Normal, HalfNormal, find_MAP

# True parameter values
alpha, sigma = 1, 1
beta = [1, 2.5]

size = 100

X1 = np.random.randn(size)
X2 = np.random.randn(size) * 0.2
Y = alpha + beta[0]*X1 + beta[1]*X2 + np.random.randn(size)*sigma

basic_model = Model()
with basic_model:
    alpha = Normal('alpha', mu=0, sd=10)
    beta = Normal('beta', mu=0, sd=10, shape=2)
    sigma = HalfNormal('sigma', sd=1)
    mu = alpha + beta[0]*X1 + beta[1]*X2
    Y_obs = Normal('Y_obs', mu=mu, sd=sigma, observed=Y)

print(basic_model)

map_estimate = find_MAP(model=basic_model)
print(map_estimate)
#{'alpha': array(0.9065985664354854), 'beta': array([ 0.948486  ,  2.60705513]), 'sigma_log': array(-0.03278146854842092)}

map_estimate2 = find_MAP(model=basic_model, fmin=opt.fmin_powell)
print(map_estimate2)

map_estimate3 = find_MAP(model=basic_model, fmin=opt.fmin_l_bfgs_b)
print(map_estimate3)

with basic_model:
    ...  # excerpt truncated; the original demo continues with posterior sampling here