How to use the abcpy.perturbationkernel.DefaultKernel class in abcpy

To help you get started, we’ve selected a few abcpy examples based on popular ways the library is used in public projects.

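DefaultKernel is abcpy’s ready-made perturbation kernel: given the list of parameters to perturb (each itself a probabilistic model), it assembles a joint kernel that, per the abcpy documentation, perturbs continuous parameters with a multivariate normal kernel and discrete parameters with a random walk kernel. A minimal, self-contained sketch (illustrative, not taken from the projects below):

from abcpy.continuousmodels import Uniform, Normal
from abcpy.perturbationkernel import DefaultKernel

# priors on the parameters of a Gaussian "height" model
mu = Uniform([[150], [200]], name='mu')
sigma = Uniform([[5], [25]], name='sigma')
height = Normal([mu, sigma], name='height')

# perturb mu and sigma jointly; both are continuous, so DefaultKernel
# internally applies a multivariate normal kernel to them
kernel = DefaultKernel([mu, sigma])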

github eth-cscs / abcpy / examples / statisticslearning / pmcabc_gaussian_statistics_learning.py (View on Github)

    # Learn the optimal summary statistics using SemiautomaticNN summary selection
    from abcpy.statisticslearning import SemiautomaticNN
    statistics_learning = SemiautomaticNN([height], statistics_calculator, backend,
                                          n_samples=1000, n_samples_per_param=1, seed=1)

    # Redefine the statistics function
    new_statistics_calculator = statistics_learning.get_statistics()


    # define distance
    from abcpy.distances import Euclidean
    distance_calculator = Euclidean(new_statistics_calculator)

    # define kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([mu, sigma])

    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC([height], [distance_calculator], backend, kernel, seed=1)

    # sample from scheme
    T, n_sample, n_samples_per_param = 3, 10, 10
    eps_arr = np.array([500])
    epsilon_percentile = 10
    journal = sampler.sample([height_obs], T, eps_arr, n_sample, n_samples_per_param, epsilon_percentile)

    return journal
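
Note that the excerpt assumes names defined earlier in the same script (mu, sigma, height, height_obs, statistics_calculator, backend). A rough sketch of that context, with illustrative values:

import numpy as np

from abcpy.continuousmodels import Uniform, Normal
from abcpy.statistics import Identity
from abcpy.backends import BackendDummy as Backend

mu = Uniform([[150], [200]], name='mu')
sigma = Uniform([[5], [25]], name='sigma')
height = Normal([mu, sigma], name='height')

height_obs = [160.8, 167.4, 185.1]        # placeholder observations
statistics_calculator = Identity(degree=2, cross=False)
backend = Backend()                       # serial backend, for illustration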

github eth-cscs / abcpy / abcpy / inferences.py (View on Github)

def __init__(self, root_models, distances, backend, kernel=None, seed=None):
        self.model = root_models
        # We define the joint linear-combination distance using the distances of the individual models
        self.distance = LinearCombination(root_models, distances)

        if kernel is None:

            mapping, garbage_index = self._get_mapping()
            models = []
            for mdl, mdl_index in mapping:
                models.append(mdl)

            kernel = DefaultKernel(models)

        self.kernel = kernel
        self.backend = backend
        self.rng = np.random.RandomState(seed)
        self.anneal_parameter = None
        self.logger = logging.getLogger(__name__)


        # these are usually big tables, so we broadcast them to have them once
        # per executor instead of once per task
        self.accepted_parameters_manager = AcceptedParametersManager(self.model)

        self.simulation_counter = 0
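
As this constructor shows, the kernel argument is optional: when it is None, the sampler walks the model graph via _get_mapping() and builds a DefaultKernel over all the parameters it finds. A minimal sketch of relying on that fallback, reusing the names from the first excerpt:

from abcpy.inferences import PMCABC

# no kernel passed: the constructor above creates a DefaultKernel internally
sampler = PMCABC([height], [distance_calculator], backend, seed=1)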

github eth-cscs / abcpy / examples / extensions / models / gaussian_python / pmcabc_gaussian_model_simple.py (View on Github)

    sigma = Uniform([[5], [25]])

    # define the model
    height = Gaussian([mu, sigma], name='height')

    # define statistics
    from abcpy.statistics import Identity
    statistics_calculator = Identity(degree = 2, cross = False)

    # define distance
    from abcpy.distances import LogReg
    distance_calculator = LogReg(statistics_calculator)

    # define kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([mu, sigma])

    # define backend
    # Note, the dummy backend does not parallelize the code!
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()

    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC([height], [distance_calculator], backend, kernel, seed=1)

    # sample from scheme
    T, n_sample, n_samples_per_param = 3, 250, 10
    eps_arr = np.array([.75])
    epsilon_percentile = 10
    journal = sampler.sample([height_obs], T, eps_arr, n_sample, n_samples_per_param, epsilon_percentile)

github eth-cscs / abcpy / abcpy / inferences.py (View on Github)

def __init__(self, root_models, distances, backend, kernel=None, seed=None):
        self.model = root_models
        # We define the joint linear-combination distance using the distances of the individual models
        self.distance = LinearCombination(root_models, distances)

        if kernel is None:

            mapping, garbage_index = self._get_mapping()
            models = []
            for mdl, mdl_index in mapping:
                models.append(mdl)
            kernel = DefaultKernel(models)

        self.kernel = kernel
        self.backend = backend
        self.rng = np.random.RandomState(seed)
        self.logger = logging.getLogger(__name__)

        # these are usually big tables, so we broadcast them to have them once
        # per executor instead of once per task
        self.smooth_distances_bds = None
        self.all_distances_bds = None
        self.accepted_parameters_manager = AcceptedParametersManager(self.model)

        self.simulation_counter = 0
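
The broadcast comment above describes abcpy’s backend pattern: large, read-only tables are shipped once per executor as broadcast data sets rather than once per task. A minimal sketch of that API with the serial dummy backend:

from abcpy.backends import BackendDummy

backend = BackendDummy()
table_bds = backend.broadcast([1, 2, 3])  # ship once per executor
print(table_bds.value())                  # read it back inside a task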

github eth-cscs / abcpy / examples / hierarchicalmodels / pmcabc_inference_on_multiple_sets_of_obs.py (View on Github)

    from abcpy.statistics import Identity
    statistics_calculator_final_grade = Identity(degree = 2, cross = False)
    statistics_calculator_final_scholarship = Identity(degree = 3, cross = False)

    # Define a distance measure for final grade and final scholarship
    from abcpy.distances import Euclidean
    distance_calculator_final_grade = Euclidean(statistics_calculator_final_grade)
    distance_calculator_final_scholarship = Euclidean(statistics_calculator_final_scholarship)

    # Define a backend
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()

    # Define a perturbation kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([school_location, class_size, grade_without_additional_effects, \
                            background, scholarship_without_additional_effects])

    # Define sampling parameters
    T, n_sample, n_samples_per_param = 3, 250, 10
    eps_arr = np.array([.75])
    epsilon_percentile = 10

    # Define sampler
    from abcpy.inferences import PMCABC
    sampler = PMCABC([final_grade, final_scholarship], \
                     [distance_calculator_final_grade, distance_calculator_final_scholarship], backend, kernel)

    # Sample
    journal = sampler.sample([grades_obs, scholarship_obs], \
                             T, eps_arr, n_sample, n_samples_per_param, epsilon_percentile)
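
In this hierarchical model, DefaultKernel receives five parameters at once. Per the abcpy documentation it splits them by type: continuous parameters share one multivariate normal kernel, discrete ones share a random walk kernel. A rough sketch of the explicit equivalent, assuming all five parameters here are continuous:

from abcpy.perturbationkernel import MultivariateNormalKernel, JointPerturbationKernel

continuous = MultivariateNormalKernel([school_location, class_size,
                                       grade_without_additional_effects,
                                       background,
                                       scholarship_without_additional_effects])
kernel = JointPerturbationKernel([continuous])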

github eth-cscs / abcpy / examples / approx_lhd / pmc_hierarchical_models.py (View on Github)

    from abcpy.statistics import Identity
    statistics_calculator_final_grade = Identity(degree = 2, cross = False)
    statistics_calculator_final_scholarship = Identity(degree = 3, cross = False)

    # Define an approximate likelihood for final grade and final scholarship
    from abcpy.approx_lhd import SynLikelihood
    approx_lhd_final_grade = SynLikelihood(statistics_calculator_final_grade)
    approx_lhd_final_scholarship = SynLikelihood(statistics_calculator_final_scholarship)

    # Define a backend
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()

    # Define a perturbation kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([school_location, class_size, grade_without_additional_effects, \
                            background, scholarship_without_additional_effects])

    # Define sampling parameters
    T, n_sample, n_samples_per_param = 3, 250, 10

    # Define sampler
    from abcpy.inferences import PMC
    sampler = PMC([final_grade, final_scholarship], \
                     [approx_lhd_final_grade, approx_lhd_final_scholarship], backend, kernel)

    # Sample
    journal = sampler.sample([grades_obs, scholarship_obs], T, n_sample, n_samples_per_param)
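
Unlike PMCABC, PMC scores simulations with the approximate (synthetic) likelihood directly, so sample() takes no epsilon schedule. The resulting journal can be persisted and reloaded; a minimal sketch, assuming abcpy’s Journal API and a hypothetical filename:

from abcpy.output import Journal

journal.save('pmc_hierarchical.jnl')               # hypothetical filename
journal = Journal.fromFile('pmc_hierarchical.jnl')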

github eth-cscs / abcpy / examples / backends / mpi / mpi_pmc_hierarchical_models.py (View on Github)

    statistics_calculator_final_scholarship = Identity(degree = 3, cross = False)

    # Define an approximate likelihood for final grade and final scholarship
    from abcpy.approx_lhd import SynLikelihood
    approx_lhd_final_grade = SynLikelihood(statistics_calculator_final_grade)
    approx_lhd_final_scholarship = SynLikelihood(statistics_calculator_final_scholarship)

    # Define a backend
    # from abcpy.backends import BackendDummy as Backend
    # backend = Backend()

    setup_backend()

    # Define a perturbation kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([school_location, class_size, grade_without_additional_effects, \
                            background, scholarship_without_additional_effects])

    # Define sampling parameters
    T, n_sample, n_samples_per_param = 3, 250, 10

    # Define sampler
    from abcpy.inferences import PMC
    sampler = PMC([final_grade, final_scholarship], \
                     [approx_lhd_final_grade, approx_lhd_final_scholarship], backend, kernel)

    # Sample
    journal = sampler.sample([grades_obs, scholarship_obs], T, n_sample, n_samples_per_param)
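
Here the backend is produced by setup_backend() rather than built inline, so the same script can switch between serial and MPI execution. A minimal sketch of such a helper, assuming abcpy’s BackendMPI and that the surrounding script exposes the backend as a module-level global (launch with e.g. mpirun -np 4 python script.py):

def setup_backend():
    # assumption: the script shares the backend via a global, as the
    # later use of `backend` in this excerpt suggests
    global backend
    from abcpy.backends import BackendMPI as Backend
    backend = Backend()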

github eth-cscs / abcpy / abcpy / inferences.py (View on Github)

def __init__(self, root_models, distances, backend, kernel=None, seed=None):
        self.model = root_models
        # We define the joint linear-combination distance using the distances of the individual models
        self.distance = LinearCombination(root_models, distances)

        if kernel is None:

            mapping, garbage_index = self._get_mapping()
            models = []
            for mdl, mdl_index in mapping:
                models.append(mdl)
            kernel = DefaultKernel(models)

        self.kernel = kernel
        self.backend = backend
        self.logger = logging.getLogger(__name__)

        self.epsilon = None
        self.rng = np.random.RandomState(seed)

        # these are usually big tables, so we broadcast them to have them once
        # per executor instead of once per task
        self.accepted_parameters_manager = AcceptedParametersManager(self.model)
        self.accepted_y_sim_bds = None

        self.simulation_counter = 0

github eth-cscs / abcpy / examples / backends / dummy / pmcabc_gaussian.py (View on Github)

    # define the model
    from abcpy.continuousmodels import Normal
    height = Normal([mu, sigma])
    
    # define statistics
    from abcpy.statistics import Identity
    statistics_calculator = Identity(degree = 2, cross = False)
    
    # define distance
    from abcpy.distances import LogReg
    distance_calculator = LogReg(statistics_calculator)
    
    # define kernel
    from abcpy.perturbationkernel import DefaultKernel
    kernel = DefaultKernel([mu, sigma])

    # define backend
    # Note, the dummy backend does not parallelize the code!
    from abcpy.backends import BackendDummy as Backend
    backend = Backend()
    
    # define sampling scheme
    from abcpy.inferences import PMCABC
    sampler = PMCABC([height], [distance_calculator], backend, kernel, seed=1)
    
    # sample from scheme
    T, n_sample, n_samples_per_param = 3, 250, 10
    eps_arr = np.array([.75])
    epsilon_percentile = 10
    journal = sampler.sample([height_obs], T, eps_arr, n_sample, n_samples_per_param, epsilon_percentile)
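
Whichever scheme produced it, the returned journal holds the accepted parameters and their weights. A minimal sketch of inspecting it, assuming abcpy’s Journal API (posterior_mean() assumes a recent abcpy release):

parameters = journal.get_parameters()   # dict: parameter name -> accepted values
weights = journal.get_weights()         # importance weights of the last iteration
print(journal.posterior_mean())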