How to use the elfi.Discrepancy function in elfi

To help you get started, we've selected a few elfi examples based on popular ways elfi.Discrepancy is used in public projects.

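The general pattern is the same across these projects: build an ElfiModel with priors, a simulator, and summary nodes, then attach an elfi.Discrepancy node whose callable receives the simulated summaries as positional arrays and the observed summaries through the observed keyword. Below is a minimal, self-contained sketch of that pattern with a toy Gaussian simulator; the simulator, summary, and node names are illustrative and not taken from the snippets that follow.

    from functools import partial

    import numpy as np
    import scipy.stats as ss
    import elfi

    # Toy vectorised simulator: n_obs draws from N(mu, 1) for each mu in the batch.
    def simulator(mu, batch_size=1, random_state=None, n_obs=50):
        mu = np.asanyarray(mu).reshape((-1, 1))
        return ss.norm.rvs(loc=mu, size=(batch_size, n_obs), random_state=random_state)

    # Custom discrepancy: simulated summaries arrive positionally with shape
    # (batch_size, ...); observed summaries arrive via the `observed` keyword.
    def my_distance(*simulated, observed):
        sim = np.column_stack(simulated)
        obs = np.column_stack(observed)
        return np.sqrt(np.sum((sim - obs) ** 2, axis=1))

    m = elfi.ElfiModel()
    elfi.Prior('uniform', -5, 10, model=m, name='mu')
    y_obs = simulator(2.0, n_obs=50, random_state=np.random.RandomState(0))
    elfi.Simulator(simulator, m['mu'], observed=y_obs, name='sim')
    elfi.Summary(partial(np.mean, axis=1), m['sim'], name='mean')
    elfi.Discrepancy(my_distance, m['mean'], name='d')

    # Rejection ABC on the discrepancy node.
    result = elfi.Rejection(m['d'], batch_size=1000, seed=1).sample(500, quantile=0.01)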

github elfi-dev / elfi / elfi / examples / gnk.py

    priors.append(elfi.Prior('uniform', 0, 10, model=m, name='g'))
    priors.append(elfi.Prior('uniform', 0, 10, model=m, name='k'))

    # Obtaining the observations.
    y_obs = GNK(*true_params, n_obs=n_obs, random_state=np.random.RandomState(seed))

    # Defining the simulator.
    fn_simulator = partial(GNK, n_obs=n_obs)
    elfi.Simulator(fn_simulator, *priors, observed=y_obs, name='GNK')

    # Initialising the summary statistics as in Allingham et al. (2009).
    default_ss = elfi.Summary(ss_order, m['GNK'], name='ss_order')

    # Using the multi-dimensional Euclidean distance function as
    # the summary statistics' implementations are designed for multi-dimensional cases.
    elfi.Discrepancy(euclidean_multiss, default_ss, name='d')
    return m
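
The gnk example passes the custom callable euclidean_multiss to elfi.Discrepancy; its actual implementation lives in elfi/examples/gnk.py. As a rough, hypothetical sketch of such a multi-dimensional Euclidean discrepancy, assuming summaries of shape (batch_size, dim_ss, dim_ss_point) as noted in the bignk example further down:

    import numpy as np

    # Hypothetical sketch, not the library's euclidean_multiss: Euclidean distance
    # over each summary dimension, averaged into one value per batch item.
    def euclidean_multidim_sketch(*simulated, observed):
        pts_sim = simulated[0]  # simulated summaries, shape (batch_size, dim_ss, dim_ss_point)
        pts_obs = observed[0]   # observed summaries, shape (1, dim_ss, dim_ss_point)
        d_per_dim = np.sqrt(np.sum((pts_sim - pts_obs) ** 2, axis=2))  # (batch_size, dim_ss)
        return np.mean(d_per_dim, axis=1)  # one distance per batch item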

github elfi-dev / elfi / elfi / methods / diagnostics.py

        Returns
        -------
        array_like
            Accepted parameters.

        """
        # Initialise the distance function.
        m = self.simulator.model.copy()
        list_ss = []
        for ss in set_ss:
            list_ss.append(elfi.Summary(ss, m[self.simulator.name], model=m))
        if isinstance(self.fn_distance, str):
            d = elfi.Distance(self.fn_distance, *list_ss, model=m)
        else:
            d = elfi.Discrepancy(self.fn_distance, *list_ss, model=m)

        # Run the simulations.
        # TODO: include different distance functions in the summary-statistics combinations.
        sampler_rejection = elfi.Rejection(d, batch_size=batch_size,
                                           seed=self.seed, pool=self.pool)
        result = sampler_rejection.sample(n_acc, n_sim=n_sim)

        # Extract the accepted parameters.
        thetas_acc = result.samples_array
        return thetas_acc
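
The branch on fn_distance above shows the two ways to build a distance node: a string selects a ready-made metric through elfi.Distance (e.g. 'euclidean'), while a callable is wrapped by elfi.Discrepancy. A small illustration of the two calls, where list_ss and m are as built in the snippet above and weighted_distance is a hypothetical callable:

    import numpy as np
    import elfi

    # Named metric: elfi.Distance with a standard distance such as 'euclidean'.
    d_named = elfi.Distance('euclidean', *list_ss, model=m)

    # Custom metric: a callable that gets simulated summaries positionally and
    # the observed summaries via the `observed` keyword (hypothetical example).
    def weighted_distance(*simulated, observed):
        sim = np.column_stack(simulated)
        obs = np.column_stack(observed)
        weights = np.ones(sim.shape[1])  # e.g. rescale summaries of different magnitude
        return np.sqrt(np.sum(weights * (sim - obs) ** 2, axis=1))

    d_custom = elfi.Discrepancy(weighted_distance, *list_ss, model=m)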

github elfi-dev / elfi / elfi / examples / daycare.py

    sim_fn = partial(daycare, **kwargs)
    priors = []
    sumstats = []

    priors.append(elfi.Prior('uniform', 0, 11, model=m, name='t1'))
    priors.append(elfi.Prior('uniform', 0, 2, model=m, name='t2'))
    priors.append(elfi.Prior('uniform', 0, 1, model=m, name='t3'))

    elfi.Simulator(sim_fn, *priors, observed=y_obs, name='DCC')

    sumstats.append(elfi.Summary(ss_shannon, m['DCC'], name='Shannon'))
    sumstats.append(elfi.Summary(ss_strains, m['DCC'], name='n_strains'))
    sumstats.append(elfi.Summary(ss_prevalence, m['DCC'], name='prevalence'))
    sumstats.append(elfi.Summary(ss_prevalence_multi, m['DCC'], name='multi'))

    elfi.Discrepancy(distance, *sumstats, name='d')

    logger.info("Generated observations with true parameters "
                "t1: %.1f, t2: %.3f, t3: %.1f, ", *true_params)

    return m

github elfi-dev / elfi / elfi / examples / bignk.py

    EPS = np.finfo(float).eps
    priors.append(elfi.Prior('uniform', -1 + EPS, 2 - 2 * EPS, model=m, name='rho'))

    # Obtaining the observations.
    y_obs = BiGNK(*true_params, n_obs=n_obs, random_state=np.random.RandomState(seed))

    # Defining the simulator.
    fn_simulator = partial(BiGNK, n_obs=n_obs)
    elfi.Simulator(fn_simulator, *priors, observed=y_obs, name='BiGNK')

    # Initialising the default summary statistics.
    default_ss = elfi.Summary(ss_robust, m['BiGNK'], name='ss_robust')

    # Using the custom Euclidean distance function designed for
    # the summary statistics of shape (batch_size, dim_ss, dim_ss_point).
    elfi.Discrepancy(euclidean_multiss, default_ss, name='d')
    return m

github elfi-dev / elfi / elfi / old / inference_task.py

    def discrepancy(self):
        # FIXME: redesign the dependencies so that Discrepancy can be imported on top
        # Perhaps make a separate file for bare abstract classes
        import elfi
        ds = self._find_by_class(elfi.Discrepancy)
        if len(ds) == 0:
            raise Exception("Couldn't find a discrepancy node")
        elif len(ds) > 1:
            raise Exception("More than one discrepancy nodes found")
        return ds[0]
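
This snippet is from the old elfi API and locates the discrepancy node by searching for instances of the elfi.Discrepancy class. In the current ElfiModel-based examples on this page, the discrepancy node is simply looked up by name, for example (assuming a model-building function like the ones above that returns m):

    import elfi

    m = get_model()   # hypothetical: any of the example builders above returning m
    d = m['d']        # the elfi.Discrepancy node named 'd'
    rej = elfi.Rejection(d, batch_size=1000, seed=0)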

github elfi-dev / elfi / elfi / methods.py

    def __init__(self, distance_node=None, parameter_nodes=None, batch_size=1000,
                 store=None):

        if not isinstance(distance_node, Discrepancy):
            raise TypeError("Distance node needs to inherit elfi.Discrepancy")
        if not all(map(lambda n: isinstance(n, Transform), parameter_nodes)):
            raise TypeError("Parameter nodes need to inherit elfi.Operation")

        self.distance_node = distance_node
        self.parameter_nodes = parameter_nodes
        self.n_params = len(parameter_nodes)
        self.batch_size = int(batch_size)
        self.store = core.prepare_store(store)

github elfi-dev / elfi / elfi / examples / ricker.py

    true_params = [3.8]

    m = elfi.ElfiModel()
    y_obs = simulator(*true_params, n_obs=n_obs, random_state=np.random.RandomState(seed_obs))
    sim_fn = partial(simulator, n_obs=n_obs)
    sumstats = []

    if stochastic:
        elfi.Prior(ss.expon, np.e, 2, model=m, name='t1')
        elfi.Prior(ss.truncnorm, 0, 5, model=m, name='t2')
        elfi.Prior(ss.uniform, 0, 100, model=m, name='t3')
        elfi.Simulator(sim_fn, m['t1'], m['t2'], m['t3'], observed=y_obs, name='Ricker')
        sumstats.append(elfi.Summary(partial(np.mean, axis=1), m['Ricker'], name='Mean'))
        sumstats.append(elfi.Summary(partial(np.var, axis=1), m['Ricker'], name='Var'))
        sumstats.append(elfi.Summary(num_zeros, m['Ricker'], name='#0'))
        elfi.Discrepancy(chi_squared, *sumstats, name='d')

    else:  # very simple deterministic case
        elfi.Prior(ss.expon, np.e, model=m, name='t1')
        elfi.Simulator(sim_fn, m['t1'], observed=y_obs, name='Ricker')
        sumstats.append(elfi.Summary(partial(np.mean, axis=1), m['Ricker'], name='Mean'))
        elfi.Distance('euclidean', *sumstats, name='d')

    return m
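
The chi_squared callable wired in above follows the same calling convention as the other custom discrepancies on this page; its actual implementation is in elfi/examples/ricker.py. A sketch of a chi-squared style goodness-of-fit over the stacked summaries, followed by a hedged example of sampling from the returned model (get_model and its arguments mirror the builder above):

    import numpy as np
    import elfi

    # Sketch of a chi-squared style discrepancy over the stacked summary statistics.
    def chi_squared_sketch(*simulated, observed):
        sim = np.column_stack(simulated)
        obs = np.column_stack(observed)
        return np.sum((sim - obs) ** 2 / obs, axis=1)

    # Run rejection ABC on the discrepancy node 'd' of the model built above.
    m = get_model(stochastic=True)  # hypothetical call, mirroring the builder above
    rej = elfi.Rejection(m['d'], batch_size=1000, seed=1)
    result = rej.sample(1000, quantile=0.01)
    thetas = result.samples_array   # accepted parameters, as in diagnostics.py above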