How to use the pymc3.model.modelcontext function in pymc3

To help you get started, we’ve selected a few pymc3.model.modelcontext examples, based on popular ways it is used in public projects.
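
For orientation, here is a minimal sketch of the pattern all the snippets below share: modelcontext(model) returns the model you pass in, or, when model is None, the innermost model opened with a `with pm.Model():` block.

import pymc3 as pm
from pymc3.model import modelcontext

with pm.Model() as my_model:
    x = pm.Normal("x", 0.0, 1.0)
    # inside the `with` block, modelcontext(None) finds my_model on the stack
    assert modelcontext(None) is my_model

# an explicitly passed model is returned unchanged
assert modelcontext(my_model) is my_model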


github.com/pymc-devs/pymc3 · pymc3/sampling.py
    random_seed : int, optional
        Seed for the random number generator.
    progressbar : bool, optional (default: True)
        Whether or not to display a progress bar in the command line. The bar shows the percentage
        of completion, the sampling speed in samples per second (SPS), and the estimated remaining
        time until completion ("expected time of arrival"; ETA).

    Returns
    -------
    samples : dict
        Dictionary with the variables as keys. The values are the
        posterior predictive samples from the weighted models.
    """
    np.random.seed(random_seed)

    if models is None:
        # `models` is None here, so modelcontext(None) returns the model
        # on the current context stack
        models = [modelcontext(models)] * len(traces)

    if weights is None:
        weights = [1] * len(traces)

    if len(traces) != len(weights):
        raise ValueError("The number of traces and weights should be the same")

    if len(models) != len(weights):
        raise ValueError("The number of models and weights should be the same")

    length_morv = len(models[0].observed_RVs)
    if not all(len(i.observed_RVs) == length_morv for i in models):
        raise ValueError("The number of observed RVs should be the same for all models")

    weights = np.asarray(weights)
    p = weights / np.sum(weights)
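
A hypothetical usage sketch for the weighted posterior predictive sampler excerpted above (the model definitions and the weights are illustrative, not from the source):

import numpy as np
import pymc3 as pm

y = np.random.randn(100)

with pm.Model() as model_a:
    mu = pm.Normal("mu", 0.0, 10.0)
    pm.Normal("y", mu=mu, sigma=1.0, observed=y)
    trace_a = pm.sample(500, tune=500)

with pm.Model() as model_b:
    mu = pm.StudentT("mu", nu=4, mu=0.0, sigma=10.0)
    pm.Normal("y", mu=mu, sigma=1.0, observed=y)
    trace_b = pm.sample(500, tune=500)

# the weights here are made up; in practice they might come from model comparison
ppc = pm.sample_posterior_predictive_w(
    traces=[trace_a, trace_b],
    models=[model_a, model_b],
    weights=[0.6, 0.4],
)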
github.com/pymc-devs/pymc3 · pymc3/step_methods/slicer.py
    def __init__(self, vars=None, w=1., tune=True, model=None,
                 iter_limit=np.inf, **kwargs):
        self.model = modelcontext(model)
        self.w = w
        self.tune = tune
        self.n_tunes = 0.
        self.iter_limit = iter_limit

        if vars is None:
            vars = self.model.cont_vars
        vars = inputvars(vars)

        super().__init__(vars, [self.model.fastlogp], **kwargs)
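
Because Slice.__init__ calls modelcontext(model), constructing the step method inside the `with` block needs no explicit model argument. A minimal sketch, assuming a standard pymc3 setup:

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    step = pm.Slice(w=1.0)   # model resolved via modelcontext(None)
    trace = pm.sample(500, step=step, tune=500)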
github.com/pymc-devs/pymc3 · pymc3/sampling.py
def _iter_sample(
    draws, step, start=None, trace=None, chain=0, tune=None, model=None, random_seed=None
):
    model = modelcontext(model)
    draws = int(draws)
    if random_seed is not None:
        np.random.seed(random_seed)
    if draws < 1:
        raise ValueError("Argument `draws` must be greater than 0.")

    if start is None:
        start = {}

    strace = _choose_backend(trace, chain, model=model)

    if len(strace) > 0:
        update_start_vals(start, strace.point(-1), model)
    else:
        update_start_vals(start, model.test_point, model)
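
_iter_sample is a private helper; the public entry point pm.sample follows the same convention, resolving model=None from the context. A sketch:

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    # model=None: pm.sample (and its helper _iter_sample) resolve the model
    # from the context via modelcontext
    trace = pm.sample(draws=1000, tune=1000, chains=1)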
github.com/pymc-devs/pymc3 · pymc3/smc/smc.py
        self.draws = draws
        self.kernel = kernel
        self.n_steps = n_steps
        self.parallel = parallel
        self.start = start
        self.cores = cores
        self.tune_steps = tune_steps
        self.p_acc_rate = p_acc_rate
        self.threshold = threshold
        self.epsilon = epsilon
        self.dist_func = dist_func
        self.sum_stat = sum_stat
        self.progressbar = progressbar
        self.model = model
        self.random_seed = random_seed

        # re-resolve through modelcontext so that model=None falls back to
        # the current model context
        self.model = modelcontext(model)

        if self.random_seed != -1:
            np.random.seed(self.random_seed)

        if self.cores is None:
            self.cores = _cpu_count()

        self.beta = 0
        self.max_steps = n_steps
        self.proposed = draws * n_steps
        self.acc_rate = 1
        self.acc_per_chain = np.ones(self.draws)
        self.model.marginal_log_likelihood = 0
        self.variables = inputvars(self.model.vars)
        dimension = sum(v.dsize for v in self.variables)
        self.scalings = np.ones(self.draws) * min(1, 2.38 ** 2 / dimension)
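
A usage sketch, assuming the pymc3 release this SMC class comes from: pm.sample_smc constructs the SMC instance internally, so the model is again taken from the context.

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    trace = pm.sample_smc(draws=1000)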
github.com/pymc-devs/pymc3 · pymc3/step_methods/hmc/base_hmc.py
        scaling : array_like, ndim = {1,2}
            Scaling for momentum distribution. 1d arrays are interpreted as a
            matrix diagonal.
        step_scale : float, default=0.25
            Size of steps to take, automatically scaled down by 1/n**(1/4)
        is_cov : bool, default=False
            Treat scaling as a covariance matrix/vector if True, else treat
            it as a precision matrix/vector
        model : pymc3 Model instance
        blocked: bool, default=True
        potential : Potential, optional
            An object that represents the Hamiltonian with `velocity`,
            `energy`, and `random` methods.
        **theano_kwargs: passed to theano functions
        """
        self._model = modelcontext(model)

        if vars is None:
            vars = self._model.cont_vars
        vars = inputvars(vars)

        super().__init__(vars, blocked=blocked, model=model, dtype=dtype, **theano_kwargs)

        self.adapt_step_size = adapt_step_size
        self.Emax = Emax
        self.iter_count = 0
        size = self._logp_dlogp_func.size

        self.step_size = step_scale / (size ** 0.25)
        self.step_adapt = step_sizes.DualAverageAdaptation(
            self.step_size, target_accept, gamma, k, t0
        )
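
NUTS and HamiltonianMC subclass BaseHMC and therefore inherit this modelcontext lookup. A minimal sketch:

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    step = pm.NUTS(target_accept=0.9)   # self._model = modelcontext(None)
    trace = pm.sample(500, step=step, tune=500)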
github.com/pymc-devs/pymc3 · pymc3/glm/families.py
    def _get_priors(self, model=None, name=''):
        """Return prior distributions of the likelihood.

        Returns
        -------
        dict : mapping name -> pymc3 distribution
        """
        if name:
            name = '{}_'.format(name)
        model = modelcontext(model)
        priors = {}
        for key, val in self.priors.items():
            if isinstance(val, (numbers.Number, np.ndarray, np.generic)):
                priors[key] = val
            else:
                priors[key] = model.Var('{}{}'.format(name, key), val)

        return priors
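
A usage sketch for the GLM path (assumes pandas and patsy are installed): _get_priors runs internally when the family's priors are registered on the context model.

import numpy as np
import pandas as pd
import pymc3 as pm

df = pd.DataFrame({"x": np.random.randn(50)})
df["y"] = 2.0 * df["x"] + np.random.randn(50)

with pm.Model():
    # the family's priors are created on the enclosing model via modelcontext
    pm.glm.GLM.from_formula("y ~ x", df, family=pm.glm.families.Normal())
    trace = pm.sample(500, tune=500)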
github.com/pymc-devs/pymc3 · pymc3/variational/svgd.py
def svgd(vars=None, n=5000, n_particles=100, jitter=.01,
         optimizer=adagrad, start=None, progressbar=True,
         random_seed=None, model=None):

    if random_seed is not None:
        np.random.seed(random_seed)

    model = modelcontext(model)
    if vars is None:
        vars = model.vars
    vars = pm.inputvars(vars)

    if start is None:
        start = model.test_point
    start = model.dict_to_array(start)

    # Initialize particles
    x0 = np.tile(start, (n_particles, 1))
    x0 += np.random.normal(0, jitter, x0.shape)

    theta = theano.shared(x0)

    # Create theano svgd gradient expression and function
    logp_grad_vec = _make_vectorized_logp_grad(vars, model, theta)
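
This functional svgd is from an older pymc3; later releases expose the same algorithm through pm.fit(method="svgd"), which also resolves the model from the context. A sketch under that assumption:

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    approx = pm.fit(n=5000, method="svgd")   # model=None -> modelcontext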
github.com/hvasbath/beat · beat/sampler/base.py
def _iter_sample(draws, step, start=None, trace=None, chain=0, tune=None,
                 model=None, random_seed=-1, overwrite=True,
                 update_proposal=False, keep_last=False):
    """
    Modified from :func:`pymc3.sampling._iter_sample`

    tune: int
        adaptive step-size scaling is stopped after this many chain samples
    """

    model = modelcontext(model)

    draws = int(draws)

    if draws < 1:
        raise ValueError('Argument `draws` should be above 0.')

    if start is None:
        start = {}

    if random_seed != -1:
        seed(random_seed)

    try:
        step = CompoundStep(step)
    except TypeError:
        pass
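
A sketch of the normalization idiom used above: pymc3's CompoundStep expects an iterable of step methods, so passing a single step method raises TypeError and the except branch leaves it unwrapped.

import pymc3 as pm
from pymc3.step_methods.compound import CompoundStep

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    step = pm.Metropolis()
    try:
        step = CompoundStep(step)   # works when `step` is a list of methods
    except TypeError:
        pass                        # single step method: keep it as-is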
github.com/pymc-devs/pymc3 · pymc3/sampling.py
def _prepare_iter_population(
    draws, chains, step, start, parallelize, tune=None, model=None, random_seed=None,
    progressbar=True
):
    """Prepares a PopulationStepper and traces for population sampling.

    Returns
    -------
    _iter_population : generator
        The generator that yields traces of all chains at the same time
    """
    # chains contains the chain numbers, but for indexing we need indices...
    nchains = len(chains)
    model = modelcontext(model)
    draws = int(draws)
    if random_seed is not None:
        np.random.seed(random_seed)
    if draws < 1:
        raise ValueError("Argument `draws` should be above 0.")

    # The initialization of traces, samplers and points must happen in the right order:
    # 1. traces are initialized and update_start_vals configures variable transforms
    # 2. population of points is created
    # 3. steppers are initialized and linked to the points object
    # 4. traces are configured to track the sampler stats
    # 5. a PopulationStepper is configured for parallelized stepping

    # 1. prepare a BaseTrace for each chain
    traces = [_choose_backend(None, chain, model=model) for chain in chains]
    for c, strace in enumerate(traces):
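
A usage sketch: population steppers such as pm.DEMetropolis route through _prepare_iter_population when sampled with several chains (the step choice here is illustrative).

import pymc3 as pm

with pm.Model():
    pm.Normal("x", 0.0, 1.0)
    # DEMetropolis chains share a population, so pm.sample dispatches to the
    # population-sampling path shown above
    trace = pm.sample(draws=1000, step=pm.DEMetropolis(), chains=4)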
github.com/pymc-devs/pymc3 · pymc3/tuning/scaling.py
def guess_scaling(point, vars=None, model=None, scaling_bound=1e-8):
    model = modelcontext(model)
    try:
        h = find_hessian_diag(point, vars, model=model)
    except NotImplementedError:
        h = fixed_hessian(point, vars, model=model)
    return adjust_scaling(h, scaling_bound)
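
guess_scaling is an internal helper; a minimal sketch, with the import path assumed from the file location shown above:

import pymc3 as pm
from pymc3.tuning.scaling import guess_scaling

with pm.Model() as m:
    pm.Normal("x", 0.0, 1.0)
    scaling = guess_scaling(m.test_point)   # model=None -> modelcontext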