How to use the pymc.Uniform function in pymc

To help you get started, we’ve selected a few pymc examples that show popular ways pymc.Uniform is used in public projects.

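As a quick orientation before the project examples, here is a minimal sketch in the classic PyMC 2 style that all of the snippets below use; the names theta and y are illustrative, not taken from any of the projects. Note that current pymc releases (v4 and later, built on PyTensor) use a different API in which distributions are declared inside a pm.Model() context.

import pymc

# Uniform prior on [0, 10] with an explicit starting value (classic PyMC 2 API)
theta = pymc.Uniform('theta', lower=0.0, upper=10.0, value=5.0)

# observed data modelled as Normal, with the Uniform variable as its mean
y = pymc.Normal('y', mu=theta, tau=1.0, value=[4.8, 5.1, 5.3], observed=True)

# fit with MCMC and inspect the posterior of theta
m = pymc.MCMC([theta, y])
m.sample(iter=10000, burn=1000)
print(theta.stats())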

github aflaxman / gbd / tests / test_rate_model.py (View on GitHub)
def test_neg_binom_model_sim(N=16):
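    # excerpt from the original test module, where pl and mc are the module-level pylab and pymc imports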
    # simulate negative binomial data
    pi_true = .01
    delta_true = 50

    n = pl.array(pl.exp(mc.rnormal(10, 1**-2, size=N)), dtype=int)
    k = pl.array(mc.rnegative_binomial(n*pi_true, delta_true, size=N), dtype=float)
    p = k/n

    # create NB model and priors
    vars = dict(mu_age=mc.Uniform('mu_age', 0., 1000., value=.01),
                sigma=mc.Uniform('sigma', 0., 10000., value=1000.))
    vars['mu_interval'] = mc.Lambda('mu_interval', lambda mu=vars['mu_age']: mu*pl.ones(N))
    vars.update(rate_model.log_normal_model('sim', vars['mu_interval'], vars['sigma'], p, 1./pl.sqrt(n)))

    # fit NB model
    m = mc.MCMC(vars)
    m.sample(1)

github pymc-devs / pymc3 / pymc / examples / simple.py (View on GitHub)
import pymc

true_mu = 1.5
true_tau = 50.0
N_samples = 500

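# priors: a wide Uniform on the mean mu and a Gamma on the precision tau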
mu = pymc.Uniform('mu', lower=-100.0, upper=100.0)
tau = pymc.Gamma('tau', alpha=0.1, beta=0.001)

data = pymc.rnormal(true_mu, true_tau, size=(N_samples,))

y = pymc.Normal('y', mu, tau, value=data, observed=True)

github astroML / astroML / book_figures / chapter5 / fig_model_comparison_mcmc.py (View on GitHub)
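    # excerpt: tail of the rdoublegauss sampler passed as random= to stochastic_from_dist below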
    N1 = mask1.sum()
    N2 = R.size - N1

    R[mask1] = norm(mu1, sigma1).rvs(N1)
    R[mask2] = norm(mu2, sigma2).rvs(N2)

    return R.reshape(Rshape)

DoubleGauss = pymc.stochastic_from_dist('doublegauss',
                                        logp=doublegauss_like,
                                        random=rdoublegauss,
                                        dtype=float,  # np.float was removed in recent NumPy; builtin float is equivalent
                                        mv=True)

# set up our Stochastic variables, mu1, mu2, sigma1, sigma2, ratio
M2_mu1 = pymc.Uniform('M2_mu1', -5, 5, value=0)
M2_mu2 = pymc.Uniform('M2_mu2', -5, 5, value=1)

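# a Uniform prior on log(sigma) is equivalent to a scale-invariant 1/sigma prior on sigma,
# exponentiated by the deterministic nodes below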
M2_log_sigma1 = pymc.Uniform('M2_log_sigma1', -10, 10, value=0)
M2_log_sigma2 = pymc.Uniform('M2_log_sigma2', -10, 10, value=0)


@pymc.deterministic
def M2_sigma1(M2_log_sigma1=M2_log_sigma1):
    return np.exp(M2_log_sigma1)


@pymc.deterministic
def M2_sigma2(M2_log_sigma2=M2_log_sigma2):
    return np.exp(M2_log_sigma2)

M2_ratio = pymc.Uniform('M2_ratio', 1E-3, 1E3, value=1)

github aflaxman / gbd / validate_age_group.py (View on GitHub)
"""Midpoint/covariate model sq"""
    # Create age-group model
    ## Spline model to represent age-specific rate
    model.vars += dismod3.age_pattern.spline(name='midc', ages=model.ages,
                                             knots=knots,
                                             smoothing=pl.inf,
                                             interpolation_method='linear')

    ## Midpoint model to represent age-group data
    model.vars += dismod3.age_group.midpoint_covariate_approx(name='midc', ages=model.ages,
                                                              mu_age=model.vars['mu_age'], 
                                                              age_start=model.input_data['age_start'], age_end=model.input_data['age_end'],
                                                              transform=lambda x: x**2.)

    ## Uniform prior on negative binomial rate model over-dispersion term
    model.vars += {'delta': mc.Uniform('delta_midc', 0., 1000., value=10.)}

    ## Negative binomial rate model
    model.vars += dismod3.rate_model.neg_binom(name='midc',
                                               pi=model.vars['mu_interval'],
                                               delta=model.vars['delta'],
                                               p=model.input_data['value'],
                                               n=model.input_data['effective_sample_size'])

    fit_model(model)

github astroML / astroML / book_figures / chapter5 / fig_gaussgauss_mcmc.py (View on GitHub)
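    # excerpt: end of the log-likelihood for gaussian data with per-point gaussian errors ei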
    s2_e2 = sigma ** 2 + ei ** 2
    return -0.5 * np.sum(np.log(s2_e2) + (xi - mu) ** 2 / s2_e2, 0)


#------------------------------------------------------------
# Select the data
np.random.seed(5)
mu_true = 1.
sigma_true = 1.
N = 10
ei = 3 * np.random.random(N)
xi = np.random.normal(mu_true, np.sqrt(sigma_true ** 2 + ei ** 2))

#----------------------------------------------------------------------
# Set up MCMC for our model parameters: (mu, sigma, ei)
mu = pymc.Uniform('mu', -10, 10, value=0)
log_sigma = pymc.Uniform('log_sigma', -10, 10, value=0)
log_error = pymc.Uniform('log_error', -10, 10, value=np.zeros(N))
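# note: giving value an array (np.zeros(N)) above makes log_error an N-dimensional Uniform, one component per data point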


@pymc.deterministic
def sigma(log_sigma=log_sigma):
    return np.exp(log_sigma)


@pymc.deterministic
def error(log_error=log_error):
    return np.exp(log_error)


def gaussgauss_like(x, mu, sigma, error):
    """likelihood of gaussian with gaussian errors"""

github aflaxman / gbd / covariate_model.py (View on GitHub)
def dispersion_covariate_model(name, input_data, delta_lb, delta_ub):
    lower = pl.log(delta_lb)
    upper = pl.log(delta_ub)
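    # uniform prior on log(delta), initialised at the midpoint of [log(delta_lb), log(delta_ub)]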
    eta = mc.Uniform('eta_%s'%name, lower=lower, upper=upper, value=.5*(lower+upper))

    Z = input_data.select(lambda col: col.startswith('z_'), axis=1)
    Z = Z.select(lambda col: Z[col].std() > 0, 1)  # drop blank cols
    if len(Z.columns) > 0:
        zeta = mc.Normal('zeta_%s'%name, 0, .25**-2, value=pl.zeros(len(Z.columns)))

        @mc.deterministic(name='delta_%s'%name)
        def delta(eta=eta, zeta=zeta, Z=Z.__array__()):
            return pl.exp(eta + pl.dot(Z, zeta))

        return dict(eta=eta, Z=Z, zeta=zeta, delta=delta)

    else:
        @mc.deterministic(name='delta_%s'%name)
        def delta(eta=eta):
            return pl.exp(eta) * pl.ones_like(input_data.index)

github sczesla / PyAstronomy / src / funcFit / syncFit.py (View on GitHub)
    # Get the names of the free parameters
    freeNames = self.freeParamNames()
    print("Free parameters: ", freeNames)
    # Check whether parameter lists are complete, define default steps
    # if necessary. 
    self._dictComplete(freeNames, X0, "start values", forget=list(pymcPars))
    self._dictComplete(freeNames, Lims, "limits", forget=list(pymcPars))
    self._dictComplete(freeNames, Steps, "steps")
    
    # Define (or complete) the pymcPars dictionary by defining uniformly distributed
    # variables in the range [lim[0], lim[1]] with starting values defined by X0.
    for par in freeNames:
      if par in pymcPars: continue
      print("Using uniform distribution for parameter: ", par)
      print("  Start value: ", X0[par], ", Limits = [", Lims[par][0], ", ", Lims[par][1], "]")
      pymcPars[par] = pymc.Uniform(par, lower=Lims[par][0], upper=Lims[par][1], value=X0[par], doc="Automatically assigned parameter.")
    
    def getConcatenatedModel():
      result = None
      for k in six.iterkeys(self._compos):
        if result is None:
          result = self.models[k]
        else:
          result = numpy.concatenate( (result, self.models[k]) )
      return result
    
    # This function is used to update the model
    def getModel(**vals):
      self.assignValue(vals)
      self.updateModel()
      return getConcatenatedModel()

github aflaxman / gbd / book / schiz_forest.py (View on GitHub)
pred_s = pl.sqrt(r * (1-r) / n_pred)
@mc.deterministic
def pred(pi=pi, sigma=sigma):
    s_pred = pl.sqrt(pi*(1-pi)/n_pred)
    return pl.exp(mc.rnormal(pl.log(pi), 1./((s_pred/pi)**2 + sigma**2)))

### @export 'log-normal-fit-and-store'
mc.MCMC([pi, sigma, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

results['Lognormal'] = dict(pi=pi.stats(), pred=pred.stats())


### @export 'offset-log-normal-model'
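# uniform priors on the rate pi, the offset zeta, and the dispersion sigma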
pi = mc.Uniform('pi', lower=0, upper=1, value=.5)
zeta = mc.Uniform('zeta', lower=0, upper=.005, value=.001)
sigma = mc.Uniform('sigma', lower=0, upper=10, value=.01)

@mc.potential
def obs(pi=pi, zeta=zeta, sigma=sigma):
    return mc.normal_like(pl.log(r+zeta), pl.log(pi+zeta), 1./((s/(r+zeta))**2 + sigma**2))

@mc.deterministic
def pred(pi=pi, zeta=zeta, sigma=sigma):
    s_pred = pl.sqrt(pi*(1-pi)/n_pred)
    return pl.exp(mc.rnormal(pl.log(pi+zeta),
                    1./((s_pred/(pi+zeta))**2 + sigma**2))) \
                - zeta

### @export 'offset-log-normal-fit-and-store'
mc.MCMC([pi, zeta, sigma, obs, pred]).sample(iter, burn, thin, verbose=False, progress_bar=False)

pymc
Probabilistic Programming in Python: Bayesian Modeling and Probabilistic Machine Learning with PyTensor
License: Apache-2.0