How to use the pymc3.traceplot function in pymc3

To help you get started, we’ve selected a few pymc3.traceplot examples based on popular ways it is used in public projects.

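For orientation, here is a minimal, self-contained sketch of the usual pattern (a sketch, assuming an older pymc3 release where `traceplot` still accepts `varnames`; from pymc3 3.7 onward plotting is delegated to ArviZ and the keyword is `var_names`):

import numpy as np
import pymc3 as pm

# Simulated data: 200 draws from a Normal(1.0, 2.0)
data = np.random.normal(loc=1.0, scale=2.0, size=200)

with pm.Model():
    mu = pm.Normal('mu', mu=0., sd=10.)
    sd = pm.HalfNormal('sd', sd=10.)
    pm.Normal('obs', mu=mu, sd=sd, observed=data)
    trace = pm.sample(1000, tune=1000)

# One subplot row per free parameter: KDE of the marginal posterior on the
# left, sampled values per iteration on the right
pm.traceplot(trace, varnames=['mu', 'sd'])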

github neuropsychology / Neuropsydia.py / neuropsydia / stats.py View on Github
import numpy as np
import plotly.graph_objs as go
import pymc3

# Fragment of a plotting helper: `formula`, `data`, `samples` and the
# `robust`/`plot_*` flags are arguments of the enclosing function.
with pymc3.Model() as model:  # model specifications in PyMC3 are wrapped in a with-statement
    if family == "Normal":
        family = pymc3.glm.families.Normal()
        if robust:
            family = pymc3.glm.families.StudentT()

    pymc3.glm.glm(formula, data, family=family)  # older GLM API; newer releases use pymc3.glm.GLM.from_formula
    start = pymc3.find_MAP()
    step = pymc3.NUTS(scaling=start)  # instantiate the NUTS MCMC sampling algorithm
    trace = pymc3.sample(samples, step, progressbar=True)  # draw `samples` posterior draws using NUTS

# trace = trace[int(samples/4):]  # optionally discard the first quarter as burn-in

# PLOT POSTERIOR DISTRIBUTION
if plot_posterior:
    pymc3.traceplot(trace)

# PLOT LINES
if plot_regression:
    plot_data = []
    plot_data.append(go.Scatter(x=x, y=y, mode='markers'))

    if plot_samples == "default":
        if len(trace) > 100:
            plot_samples = 100
            samples_range = np.random.randint(0, len(trace), plot_samples)
        else:
            plot_samples = samples
github DAI-Lab / SDGym / synthetic_data_benchmark / preprocessing / DPM_C.py View on Github
import matplotlib.pyplot as plt
import numpy as np
import pymc3 as pm
import scipy as sp
import scipy.stats

# `K`, `SEED`, `g_train`, `x_plot` and `stick_breaking` are defined earlier in the file.
with pm.Model() as model:
    alpha = pm.Gamma('alpha', 1., 1.)
    beta = pm.Beta('beta', 1., alpha, shape=K)
    w = pm.Deterministic('w', stick_breaking(beta))
    tau = pm.Gamma('tau', 1., 1., shape=K)
    lambda_ = pm.Uniform('lambda', 0, 5, shape=K)
    mu = pm.Normal('mu', 0, tau=lambda_ * tau, shape=K)
    obs = pm.NormalMixture('obs', w, mu, tau=lambda_ * tau,
                           observed=g_train[:, 0])

with model:
    trace = pm.sample(5000, random_seed=SEED)

pm.traceplot(trace, varnames=['alpha'])

# plots
fig, ax = plt.subplots(figsize=(8, 6))
plot_w = np.arange(K) + 1
ax.bar(plot_w - 0.5, trace['w'].mean(axis=0), width=1., lw=0)

ax.set_xlim(0.5, K)
ax.set_xlabel('Component')
ax.set_ylabel('Posterior expected mixture weight')

post_pdf_contribs = sp.stats.norm.pdf(np.atleast_3d(x_plot),
                                      trace['mu'][:, np.newaxis, :],
                                      1. / np.sqrt(trace['lambda'] * trace['tau'])[:, np.newaxis, :])
post_pdfs = (trace['w'][:, np.newaxis, :] * post_pdf_contribs).sum(axis=-1)
post_pdf_low, post_pdf_high = np.percentile(post_pdfs, [2.5, 97.5], axis=0)
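Note the `varnames=['alpha']` argument above: with K-dimensional `w`, `mu`, `tau` and `lambda` in the model, restricting the traceplot to a single scalar keeps the figure readable. Any subset works, e.g. `pm.traceplot(trace, varnames=['alpha', 'w'])`.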
github pymc-devs / pymc3 / pymc3 / examples / rankdata_ordered.py View on Github
def run(n=1500):
    if n == 'short':
        n = 50

    with m:  # `m` is the model defined earlier in the module
        trace = pm.sample(n)

    pm.traceplot(trace, varnames=['mu_hat'])

    print('Example observed data: ')
    print(y[:30, :].T)
    print('The true ranking is: ')
    print(yreal.flatten())
    print('The latent mean is: ')
    latentmu = np.hstack(([0], pm.summary(trace, varnames=['mu_hat'])['mean'].values))
    print(np.round(latentmu, 2))
    print('The estimated ranking is: ')
    print(np.argsort(latentmu))
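The `pm.summary(trace, varnames=['mu_hat'])` call works here because in pymc3 3.x `pm.summary` returns a pandas DataFrame of per-variable statistics (one row per element of `mu_hat`), so its `'mean'` column can be pulled out with `['mean'].values`.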
github aloctavodia / Doing_bayesian_data_analysis / 16_SimpleLinearRegressionPyMC.py View on Github
    # (inside the `with pm.Model():` block opened earlier in the script)
    yl = pm.Normal('yl', mu=mu, sd=sd, observed=zy)
    # Generate an MCMC chain
    trace = pm.sample(1000)


# EXAMINE THE RESULTS

## Print summary for each trace
#pm.summary(trace)

## Check for mixing and autocorrelation
#pm.autocorrplot(trace, vars=[tau])

## Plot KDE and sampled values for each parameter.
pm.traceplot(trace)

## Extract chain values:
z0 = trace['beta0']
z1 = trace['beta1']
z_sigma = trace['sd']

# Convert back to the original scale:
b1 = z1 * y_sd / x_sd
b0 = z0 * y_sd + y_m - z1 * y_sd * x_m / x_sd
sigma = z_sigma * y_sd

# Posterior prediction:
# Specify x values for which predicted y's are needed:
github pymc-devs / pymc3 / pymc3 / examples / disaster_model.py View on Github
    # (inside the `with pm.Model() as model:` block opened earlier in the script)
    switchpoint = pm.DiscreteUniform('switchpoint', lower=year.min(), upper=year.max())
    early_mean = pm.Exponential('early_mean', lam=1.)
    late_mean = pm.Exponential('late_mean', lam=1.)

    # Allocate the appropriate Poisson rate to years before and after the
    # current switchpoint location
    rate = tt.switch(switchpoint >= year, early_mean, late_mean)

    disasters = pm.Poisson('disasters', rate, observed=disasters_data)

    # Initial values for stochastic nodes
    start = {'early_mean': 2., 'late_mean': 3.}

    tr = pm.sample(1000, tune=500, start=start)
    pm.traceplot(tr)
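Since `switchpoint` is a discrete variable, `pm.traceplot(tr)` should render its marginal as a histogram rather than a KDE, while the continuous `early_mean` and `late_mean` get the usual density plots.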
github aloctavodia / Doing_bayesian_data_analysis / 18_ANOVAonewayPyMC.py View on Github
# EXAMINE THE RESULTS
burnin = 1000
thin = 10

# Print summary for each trace
#pm.summary(trace[burnin::thin])
#pm.summary(trace)

# Check for mixing and autocorrelation
#pm.autocorrplot(trace[burnin::thin], vars=model.unobserved_RVs[:-1])

## Plot KDE and sampled values for each parameter.
#pm.traceplot(trace[burnin::thin])
pm.traceplot(trace)

a0_sample = trace['a0'][burnin::thin]
b_sample = trace['b'][burnin::thin]
b0_sample = a0_sample * np.std(y) + np.mean(y)
b_sample = b_sample * np.std(y)


plt.figure(figsize=(20, 4))
for i in range(5):
    ax = plt.subplot(1, 5, i+1)
    pm.plot_posterior(b_sample[:, i], bins=50, ax=ax)
    ax.set_xlabel(r'$\beta1_{}$'.format(i))
    ax.set_title('x:{}'.format(i))
plt.tight_layout()
plt.savefig('Figure_18.2a.png')
github SimonOuellette35 / PyData-talk---Intro-to-PyMC3 / Cauchy.py View on Github
def bayesianCenter(data):
    with pm.Model():
        loc = pm.Uniform('location', lower=-1000., upper=1000.)
        scale = pm.Uniform('scale', lower=0.01, upper=1000.)

        pm.Cauchy('y', alpha=loc, beta=scale, observed=data)

        trace = pm.sample(3000, tune=3000, target_accept=0.92)
        pm.traceplot(trace)
        plt.show()

    return np.mean(trace['location'])
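The `target_accept=0.92` argument raises NUTS's step-size adaptation target above the 0.8 default, trading smaller steps (slower sampling) for fewer divergences, which helps on a heavy-tailed Cauchy posterior like this one.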
github pymc-devs / pymc3 / pymc3 / examples / ATMIP_2gaussians.py View on Github
    # (the tail of the `with ATMIP_test:` model block opened earlier)
    llk = pm.Potential('like', like)

with ATMIP_test:
    step = ATMCMC(n_chains=n_chains, tune_interval=tune_interval,
                  likelihood_name=ATMIP_test.deterministics[0].name)

trcs = ATMIP_sample(
    n_steps=n_steps,
    step=step,
    njobs=njobs,
    progressbar=True,
    trace=test_folder,
    model=ATMIP_test)

pm.summary(trcs)
Pltr = pm.traceplot(trcs, combined=True)  # combined=True overlays all chains in one subplot per variable
plt.show()
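`traceplot` returns the matplotlib axes it draws on (a 2-D array with one row per variable, as assigned to `Pltr` above), so the figure can be customized or saved afterwards. A minimal sketch, assuming a trace containing a variable named 'mu':

import matplotlib.pyplot as plt

axs = pm.traceplot(trace)                # ndarray of Axes, shape (n_vars, 2)
axs[0, 0].set_title('posterior of mu')   # 'mu' is a hypothetical variable name
plt.gcf().savefig('traceplot.png', dpi=150)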
github pymc-devs / pymc3 / pymc3 / examples / baseball.py View on Github
def run(n=2000):
    model = build_model()
    with model:
        trace = pm.sample(n, target_accept=0.99)

    pm.traceplot(trace)
github josejimenezluna / pyGPGO / pyGPGO / surrogates / tStudentProcessMCMC.py View on Github
def posteriorPlot(self):
        """
        Plots sampled posterior distributions for hyperparameters.

        """
        with self.model:
            pm.traceplot(self.trace, varnames=['l', 'sigmaf', 'sigman'])
            plt.tight_layout()
            plt.show()
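Since pymc3 3.7 the built-in plots are thin wrappers around ArviZ, so an equivalent call for code targeting recent releases would be (a sketch, assuming ArviZ is installed and `trace` stands in for `self.trace` above):

import arviz as az

az.plot_trace(trace, var_names=['l', 'sigmaf', 'sigman'])  # note var_names, not varnames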