How to use the pymc.Uninformative function in pymc

To help you get started, we’ve selected a few pymc examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github aflaxman / gbd / tests / test_covariates.py View on Github external
# NOTE(review): fragment of a test function body — the `def` line and the
# definitions of pl, mc, pandas, model, pi_true, zeta_true, Z, and ess are
# outside this excerpt, and this first line appears to have lost its
# leading indentation in extraction.
eta_true = pl.log(50)
    # Over-dispersion used to simulate the data.
    delta_true = 50 + pl.exp(eta_true)

    # Simulate noisy rates: negative-binomial counts rescaled by the
    # effective sample size, with a covariate effect exp(Z*zeta_true).
    p = mc.rnegative_binomial(pi_true*ess, delta_true*pl.exp(Z*zeta_true)) / ess

    
    # Pack simulated rates and the z_0 covariate into the model's input
    # data, with constant area/sex/year columns.
    model.input_data = pandas.DataFrame(dict(value=p, z_0=Z))
    model.input_data['area'] = 'all'
    model.input_data['sex'] = 'total'
    model.input_data['year_start'] = 2000
    model.input_data['year_end'] = 2000



    # create model and priors
    vars = dict(mu=mc.Uninformative('mu_test', value=pi_true))
    vars.update(covariate_model.mean_covariate_model('test', vars['mu'], model.input_data, {}, model, 'all', 'total', 'all'))
    vars.update(covariate_model.dispersion_covariate_model('test', model.input_data, .1, 10.))
    vars.update(rate_model.neg_binom_model('test', vars['pi'], vars['delta'], p, ess))

    # fit model (only 2 samples — a smoke test, not a real fit)
    m = mc.MCMC(vars)
    m.sample(2)
github aflaxman / gbd / book / neg_binom_sim.py View on Github external
### @export 'neg-binom-sim-study'
# Simulation study: repeatedly simulate negative-binomial count data and
# refit a negative-binomial rate model (PyMC 2.x API; `replicates`, `iter`,
# `burn`, `thin`, `pl`, and `mc` are defined outside this excerpt).
pi_true = .025
delta_true = 5.
n_pred = 1.e9  # large population for posterior-predictive draws

for i in range(replicates):
    print '\nsimulation replicate %d' % i
    ## generate simulated data
    # 16 sample sizes, log-normally distributed; counts k give rates r = k/n.
    n = pl.array(pl.exp(mc.rnormal(10, 1**-2, size=16)), dtype=int)
    k = pl.array(mc.rnegative_binomial(n*pi_true, delta_true), dtype=float)
    r = k/n


    ## setup negative binomial model
    pi = mc.Uniform('pi', lower=0, upper=1, value=.5)
    delta = mc.Uninformative('delta', value=100.)

    @mc.potential
    def obs(pi=pi, delta=delta):
        # Negative-binomial log-likelihood of the observed counts r*n.
        return mc.negative_binomial_like(r*n, pi*n, delta)

    @mc.deterministic
    def pred(pi=pi, delta=delta):
        # Posterior-predictive rate from a draw on a large population.
        return mc.rnegative_binomial(pi*n_pred, delta) / float(n_pred)

    ## fit model
    mc.MCMC([pi, delta, obs, pred]).sample(iter, burn, thin)


    ## record results
    # NOTE(review): this inner loop reuses the name `i`, shadowing the
    # replicate index above; harmless for the visible lines, but confirm
    # nothing after this excerpt reads `i` as the replicate number.
    for i, stoch in enumerate([pi, pred]):
        median = stoch.stats()['quantiles'][50]
github aflaxman / gbd / book / schiz_forest.py View on Github external
@mc.deterministic
def pred(pi=pi):
    # Posterior-predictive prevalence: binomial draw on a large population,
    # rescaled back to a rate.
    draws = mc.rbinomial(n_pred, pi)
    return draws / float(n_pred)

### @export 'binomial-fit'
# Fit the binomial model; pi/obs/pred and the sampling constants are
# defined earlier in the script.
mc.MCMC([pi, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

### @export 'binomial-store'
# mc.Matplot.plot(pi)
# pl.savefig('book/graphics/ci-prev_meta_analysis-binomial_diagnostic.png')
results['Binomial'] = dict(pi=pi.stats(), pred=pred.stats())


### @export 'beta-binomial-model'
# Beta-binomial hierarchy: pi ~ Beta(alpha, beta) with vague priors on
# alpha and beta; pi_mean tracks the implied mean alpha/(alpha+beta).
alpha = mc.Uninformative('alpha', value=4.)
beta = mc.Uninformative('beta', value=1000.)
pi_mean = mc.Lambda('pi_mean', lambda alpha=alpha, beta=beta: alpha/(alpha+beta))
pi = mc.Beta('pi', alpha, beta, value=r)

@mc.potential
def obs(pi=pi):
    # Binomial log-likelihood of the observed successes r*n out of n trials.
    successes = r*n
    return mc.binomial_like(successes, n, pi)

@mc.deterministic
def pred(alpha=alpha, beta=beta):
    # Posterior-predictive rate: draw a prevalence from Beta(alpha, beta),
    # then a binomial count on a large population, rescaled to a rate.
    p_draw = mc.rbeta(alpha, beta)
    return mc.rbinomial(n_pred, p_draw) / float(n_pred)

### @export 'beta-binomial-fit'
mcmc = mc.MCMC([alpha, beta, pi_mean, pi, obs, pred])
# Adaptive Metropolis: jointly on the (alpha, beta) pair, and on pi.
mcmc.use_step_method(mc.AdaptiveMetropolis, [alpha, beta])
mcmc.use_step_method(mc.AdaptiveMetropolis, pi)
# 10x the iterations used by the simpler models earlier in the script.
mcmc.sample(iter*10, burn*10, thin*10, verbose=False, progress_bar=False)
github aflaxman / gbd / space_time_model / model.py View on Github external
def fe(data):
    """ Fixed Effect model::
    
        Y_r,c,t = beta * X_r,c,t + e_r,c,t
        e_r,c,t ~ N(0, sigma^2)
    """
    # covariates
    # Stack the K1 'x' covariate columns into the design matrix X.
    K1 = count_covariates(data, 'x')
    X = pl.array([data['x%d'%i] for i in range(K1)])
    K2 = count_covariates(data, 'w')
    # BUG FIX: build W from the K2 'w' covariates — this used `range(K1)`
    # (the count of 'x' covariates), which mis-sizes W whenever K1 != K2
    # and raises a KeyError when K1 > K2.
    W = pl.array([data['w%d'%i] for i in range(K2)])

    # priors
    beta = mc.Uninformative('beta', value=pl.zeros(K1))    # fixed effects on 'x'
    gamma = mc.Uninformative('gamma', value=pl.zeros(K2))  # effects on 'w'
    sigma_e = mc.Uniform('sigma_e', lower=0, upper=1000, value=1)  # residual s.d.
    
    # predictions
    @mc.deterministic
    def mu(X=X, beta=beta):
        # Linear predictor: beta . X
        return pl.dot(beta, X)
    param_predicted = mu
    @mc.deterministic
    def sigma_explained(W=W, gamma=gamma):
        """ sigma_explained_i,r,c,t,a = gamma * W_i,r,c,t,a"""
        return pl.dot(gamma, W)

    @mc.deterministic
    def predicted(mu=mu, sigma_explained=sigma_explained, sigma_e=sigma_e):
        # Normal draw around mu; the precision combines the explained and
        # residual variance components.
        total_variance = sigma_explained**2. + sigma_e**2.
        return mc.rnormal(mu, 1 / total_variance)
github aflaxman / gbd / book / poisson_model.py View on Github external
# NOTE(review): truncated excerpt — this `return` is the tail of a Poisson
# observation function whose `def` line is outside this excerpt.
return mc.poisson_like(r*n, pi*n)

@mc.deterministic
def pred(pi=pi):
    # Posterior-predictive rate from a Poisson draw on a large population.
    counts = mc.rpoisson(pi*n_pred)
    return counts / float(n_pred)

### @export 'poisson-fit-and-store'
# Fit the Poisson model and record posterior summaries.
mc.MCMC([pi, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

results['Poisson'] = dict(pred=pred.stats(), pi=pi.stats())



### @export 'negative-binomial-model'
# Uniform prior on the rate pi; vague prior on the over-dispersion delta.
pi = mc.Uniform('pi', lower=0, upper=1, value=.5)
delta = mc.Uninformative('delta', value=100.)

@mc.potential
def obs(pi=pi, delta=delta):
    # Negative-binomial log-likelihood of the observed counts r*n.
    expected = pi*n
    return mc.negative_binomial_like(r*n, expected, delta)

@mc.deterministic
def pred(pi=pi, delta=delta):
    # Posterior-predictive rate via a negative-binomial draw on a large
    # population, rescaled back to a rate.
    counts = mc.rnegative_binomial(pi*n_pred, delta)
    return counts / float(n_pred)

### @export 'negative-binomial-fit-and-store'
# Fit the negative-binomial model and record posterior summaries.
mc.MCMC([pi, delta, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

key = 'Negative Binomial'
results[key] = dict(pred=pred.stats(), pi=pi.stats())

#mc.Matplot.plot(pi)
github aflaxman / gbd / book / beta_binomial_model.py View on Github external
# Finish and save the funnel plot, then show MCMC diagnostics for the
# beta-binomial parameters (the rest of the plot setup is outside this excerpt).
pl.axis([-.5, 15.5,-.0001,.0121])
pl.savefig('beta-binomial-funnel.pdf')

mc.Matplot.plot(alpha)
mc.Matplot.plot(beta)
mc.Matplot.plot(pi)



### @export 'beta-binomial-model-fixes-problem'
# Two observed populations (A, B) and a third (C, initial value .02) whose
# prevalence is to be predicted.
pop_A_prev = .002
pop_A_N = 50000
pop_B_prev = .006
pop_B_N = 50000

# Vague priors with alpha/(alpha+beta) = pop_A_prev at the initial values.
alpha = mc.Uninformative('alpha', value=10*pop_A_prev)
beta = mc.Uninformative('beta', value=10*(1-pop_A_prev))
pi = mc.Beta('pi', alpha, beta, value=[pop_A_prev, pop_B_prev, .02])
@mc.potential
def obs(pi=pi):
    # Binomial log-likelihoods for populations A and B, accumulated term by
    # term (successes*log(pi) + failures*log(1-pi)); the summation order
    # matches the original single expression.
    ll = pop_A_prev*pop_A_N*pl.log(pi[0]) + (1-pop_A_prev)*pop_A_N*pl.log(1-pi[0])
    ll = ll + pop_B_prev*pop_B_N*pl.log(pi[1])
    ll = ll + (1-pop_B_prev)*pop_B_N*pl.log(1-pi[1])
    return ll
pop_C_N = 50000
# Population C's count is an unobserved Binomial node, sampled jointly
# with the model so its posterior gives the predicted prevalence.
pop_C_k = mc.Binomial('pop_C_k', pop_C_N, pi[2])
mc.MCMC([alpha, beta, pi, obs, pop_C_k]).sample(200000,100000,20)

# Posterior median and 95% HPD interval for C's prevalence, per 1000.
pop_C_prev = pop_C_k.stats()['quantiles'][50] / float(pop_C_N)
pop_C_prev_per_1000 = '%.0f' % (pop_C_prev*1000)
print pop_C_prev_per_1000

pop_C_ui = pop_C_k.stats()['95% HPD interval'] / float(pop_C_N)
pop_C_ui_per_1000 = '[%.0f, %.0f]' % tuple(pop_C_ui*1000)
github aflaxman / gbd / consistent_model.py View on Github external
# NOTE(review): truncated excerpt — this line is the interior of a loop over
# data rows whose header (and this line's original indentation) is outside
# the excerpt; knot locations are mid-age-interval, clipped to [0, 100].
knots.append(pl.clip((row['age_start'] + row['age_end'] + 1.) / 2., 0, 100))
            
            m_all[knots[-1]] = row['value']

        # extend knots as constant beyond endpoints
        knots = sorted(knots)
        m_all[0] = m_all[knots[0]]
        m_all[100] = m_all[knots[-1]]

        knots.insert(0, 0)
        knots.append(100)

        # Linearly interpolate the all-cause mortality curve onto ages 0..100.
        m_all = scipy.interpolate.interp1d(knots, m_all[knots], kind='linear')(pl.arange(101))
    m_all = m_all[ages]

    # Vague prior on the ODE initial condition; value -10 is on the logit
    # scale (presumably ~0 prevalence after inverse-logit — TODO confirm).
    logit_C0 = mc.Uninformative('logit_C0', value=-10.)


    # use Runge-Kutta 4 ODE solver
    import dismod_ode

    N = len(m_all)
    num_step = 10  # double until it works
    ages = pl.array(ages, dtype=float)
    fun = dismod_ode.ode_function(num_step, ages, m_all)

    # NOTE(review): truncated — the body of mu_age_p continues past this excerpt.
    @mc.deterministic
    def mu_age_p(logit_C0=logit_C0,
                 i=rate['i']['mu_age'],
                 r=rate['r']['mu_age'],
                 f=rate['f']['mu_age']):
github aflaxman / gbd / book / schiz_forest.py View on Github external
# NOTE(review): truncated excerpt — the decorator above this `def`
# (presumably @mc.potential, as in the sibling models) is outside the excerpt.
def obs(pi=pi):
    return mc.poisson_like(r*n, pi*n)

@mc.deterministic
def pred(pi=pi):
    # Posterior-predictive rate from a Poisson draw on a large population.
    sampled = mc.rpoisson(pi*n_pred)
    return sampled / float(n_pred)

### @export 'poisson-fit-and-store'
# Fit the Poisson model and record posterior summaries.
mc.MCMC([pi, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

results['Poisson'] = dict(pi=pi.stats(), pred=pred.stats())


### @export 'negative-binomial-model'
# Uniform prior on the rate pi; vague prior on the over-dispersion delta.
pi = mc.Uniform('pi', lower=0, upper=1, value=.5)
delta = mc.Uninformative('delta', value=100.)

@mc.potential
def obs(pi=pi, delta=delta):
    # Negative-binomial log-likelihood of the observed counts r*n.
    mean_count = pi*n
    return mc.negative_binomial_like(r*n, mean_count, delta)

@mc.deterministic
def pred(pi=pi, delta=delta):
    # Posterior-predictive rate via a negative-binomial draw on a large
    # population, rescaled back to a rate.
    sampled = mc.rnegative_binomial(pi*n_pred, delta)
    return sampled / float(n_pred)

### @export 'negative-binomial-fit-and-store'
# Fit the negative-binomial model and record posterior summaries.
mc.MCMC([pi, delta, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

results['Negative binomial'] = dict(pi=pi.stats(), pred=pred.stats())


### @export 'normal-model'

pymc

Probabilistic Programming in Python: Bayesian Modeling and Probabilistic Machine Learning with PyTensor. (Note: the examples above use the legacy PyMC2 API — `mc.Uninformative`, `@mc.potential`, `@mc.deterministic` — which is not available in modern PyMC.)

Apache-2.0
Latest version published 4 days ago

Package Health Score

93 / 100
Full package analysis