How to use the emukit.core.loop.OuterLoop class in emukit

To help you get started, we’ve selected a few emukit examples based on popular ways OuterLoop is used in public projects.

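Before the examples, here is a minimal sketch of how the pieces around OuterLoop fit together. It is not code from the emukit repository: the toy objective f, the parameter bounds and the iteration budget are illustrative assumptions, and the ModelVariance import path follows the older emukit.experimental_design.model_based layout used by the snippets below, which may differ in newer releases. GPy is assumed to be installed.

import numpy as np
import GPy

from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.loop import (FixedIntervalUpdater, OuterLoop, SequentialPointCalculator,
                              UserFunctionWrapper)
from emukit.core.loop.loop_state import create_loop_state
from emukit.core.optimization import AcquisitionOptimizer
from emukit.experimental_design.model_based.acquisitions import ModelVariance
from emukit.model_wrappers import GPyModelWrapper


def f(x):
    # illustrative objective; must return a 2d array with one row per input point
    return np.sin(3 * x)


space = ParameterSpace([ContinuousParameter('x', 0, 1)])
x_init = np.random.rand(5, 1)
y_init = f(x_init)

# Wrap a GPy model so emukit can use it
model = GPyModelWrapper(GPy.models.GPRegression(x_init, y_init))

# The ingredients of an OuterLoop: how to pick the next point, how to keep the
# model up to date, and the data collected so far
acquisition = ModelVariance(model)
acquisition_optimizer = AcquisitionOptimizer(space)
candidate_point_calculator = SequentialPointCalculator(acquisition, acquisition_optimizer)
model_updater = FixedIntervalUpdater(model)
loop_state = create_loop_state(x_init, y_init)

loop = OuterLoop(candidate_point_calculator, model_updater, loop_state)
loop.run_loop(UserFunctionWrapper(f), 10)  # stop after 10 iterations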

github amzn / emukit / tests / emukit / benchmarking / test_metrics.py
def test_mean_squared_error_metric():
    x_test = np.random.rand(50, 2)
    y_test = np.random.rand(50, 2)

    mock_model = mock.create_autospec(IModel)
    mock_model.predict.return_value = (y_test, y_test * 10)
    model_updater_mock = mock.create_autospec(ModelUpdater)
    model_updater_mock.model = mock_model
    mock_loop = mock.create_autospec(OuterLoop)
    mock_loop.model_updaters = [model_updater_mock]

    loop_state = LoopState([])
    loop_state.metrics = dict()

    mse = MeanSquaredErrorMetric(x_test, y_test)
    metric_value = mse.evaluate(mock_loop, loop_state)

    assert metric_value.shape == (2,)

github amzn / emukit / tests / emukit / benchmarking / test_metrics.py
def test_cumulative_costs():
    x_observations = np.random.rand(50, 2)
    y_observations = np.random.rand(50, 2)
    c_observations = np.random.rand(50, 1)
    mock_model = mock.create_autospec(IModel)

    model_updater_mock = mock.create_autospec(ModelUpdater)
    model_updater_mock.model = mock_model
    mock_loop = mock.create_autospec(OuterLoop)
    mock_loop.model_updater = model_updater_mock

    loop_state = create_loop_state(x_observations, y_observations, cost=c_observations)
    loop_state.metrics = dict()

    name = 'cost'
    metric = CumulativeCostMetric(name)
    metric.reset()
    metric_value = metric.evaluate(mock_loop, loop_state)

    assert metric_value == np.cumsum(c_observations)[-1]
    assert metric_value.shape == (1,)

github amzn / emukit / tests / emukit / core / test_outer_loop.py
        # (excerpt: the snippet starts inside the MockModel class used by this test)
        @property
        def Y(self):
            return self._Y

        def predict(self, x):
            pass

        def set_data(self, x, y):
            self._X = x
            self._Y = y

        def optimize(self):
            pass

    mock_model = MockModel()
    model_updater = MockModelUpdater(mock_model)

    loop = OuterLoop(mock_next_point_calculator, model_updater)
    loop.run_loop(mock_user_function, 2)

    # Check update was last called with a loop state with all the collected data points
    assert mock_model.X.shape[0] == 2
    assert mock_model.Y.shape[0] == 2

github amzn / emukit / tests / emukit / core / test_outer_loop.py
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    mse = []

    def compute_mse(self, loop_state):
        mse.append(np.mean(np.square(model.predict(x_test)[0] - y_test)))

    loop_state = create_loop_state(x_init, y_init)

    acquisition = ModelVariance(model)
    acquisition_optimizer = AcquisitionOptimizer(space)
    candidate_point_calculator = SequentialPointCalculator(acquisition, acquisition_optimizer)
    model_updater = FixedIntervalUpdater(model)

    loop = OuterLoop(candidate_point_calculator, model_updater, loop_state)
    loop.iteration_end_event.append(compute_mse)
    loop.run_loop(user_function, 5)

    assert len(mse) == 5

github amzn / emukit / tests / emukit / core / test_outer_loop.py
def test_outer_loop(mock_next_point_calculator, mock_updater, mock_user_function):
    """ Example of automatic outer loop """

    stopping_condition = mock.create_autospec(StoppingCondition)
    stopping_condition.should_stop.side_effect = [False, False, True]

    loop = OuterLoop(mock_next_point_calculator, mock_updater)
    loop.run_loop(mock_user_function, stopping_condition)

    assert (loop.loop_state.iteration == 2)
    assert (np.array_equal(loop.loop_state.X, np.array([[0], [0]])))

github amzn / emukit / emukit / experimental_design / model_based / experimental_design_loop.py
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
from .acquisitions import ModelVariance
from ...core.acquisition import Acquisition
from ...core.interfaces.models import IModel
from ...core.loop import OuterLoop, SequentialPointCalculator, FixedIntervalUpdater
from ...core.loop.candidate_point_calculators import GreedyBatchPointCalculator
from ...core.loop.loop_state import create_loop_state
from ...core.optimization import AcquisitionOptimizer
from ...core.parameter_space import ParameterSpace


class ExperimentalDesignLoop(OuterLoop):
    def __init__(self, space: ParameterSpace, model: IModel, acquisition: Acquisition = None, update_interval: int = 1,
                 batch_size: int = 1):
        """
        An outer loop class for use with Experimental design

        :param space: Definition of domain bounds to collect points within
        :param model: The model that approximates the underlying function
        :param acquisition: Experimental design acquisition function object. Defaults to ModelVariance.
        :param update_interval: Number of iterations that pass before the next model optimization. Defaults to 1.
        :param batch_size: Number of points to collect in a batch. Defaults to one.
        """

        if acquisition is None:
            acquisition = ModelVariance(model)

        # This AcquisitionOptimizer object deals with optimizing the acquisition to find the next point to collect
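
The snippet above is cut off before the loop is assembled. A short usage sketch, in which the objective f, the bounds and the size of the initial design are purely illustrative, and where the import path mirrors the file location shown above (it may differ in newer emukit releases):

import numpy as np
import GPy

from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.loop import UserFunctionWrapper
from emukit.experimental_design.model_based.experimental_design_loop import ExperimentalDesignLoop
from emukit.model_wrappers import GPyModelWrapper


def f(x):
    return np.sin(3 * x)  # illustrative objective


space = ParameterSpace([ContinuousParameter('x', 0, 1)])
x_init = np.random.rand(5, 1)
y_init = f(x_init)

model = GPyModelWrapper(GPy.models.GPRegression(x_init, y_init))

# Uses the ModelVariance acquisition and sequential collection (batch_size=1) by default
loop = ExperimentalDesignLoop(space=space, model=model)
loop.run_loop(UserFunctionWrapper(f), 10)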

github amzn / emukit / emukit / bayesian_optimization / loops / cost_sensitive_bayesian_optimization_loop.py
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0


import numpy as np

from ...bayesian_optimization.acquisitions import ExpectedImprovement
from ...core.acquisition import Acquisition, acquisition_per_expected_cost
from ...core.interfaces import IModel
from ...core.loop import FixedIntervalUpdater, OuterLoop, SequentialPointCalculator
from ...core.loop.loop_state import create_loop_state
from ...core.optimization import AcquisitionOptimizer
from ...core.parameter_space import ParameterSpace


class CostSensitiveBayesianOptimizationLoop(OuterLoop):
    def __init__(self, space: ParameterSpace, model_objective: IModel, model_cost: IModel,
                 acquisition: Acquisition = None, update_interval: int = 1):

        """
        Emukit class that implements a loop for building modular cost-sensitive Bayesian optimization.

        :param space: Input space where the optimization is carried out.
        :param model_objective: The model that approximates the underlying objective function
        :param model_cost: The model that approximates the cost of evaluating the objective function
        :param acquisition: The acquisition function that will be used to collect new points (default, EI).
        :param update_interval:  Number of iterations between optimization of model hyper-parameters. Defaults to 1.
        """

        if not np.all(np.isclose(model_objective.X, model_cost.X)):
            raise ValueError('Emukit currently only supports identical '
                             'training inputs for the cost and objective model')
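
Constructing the loop only requires that both models share identical training inputs, which is exactly what the check above enforces. The sketch below (illustrative models and data, not taken from the repository) builds the loop but stops short of running it, since run_loop additionally needs user-function evaluations that record an evaluation cost:

import numpy as np
import GPy

from emukit.bayesian_optimization.loops.cost_sensitive_bayesian_optimization_loop import (
    CostSensitiveBayesianOptimizationLoop)
from emukit.core import ContinuousParameter, ParameterSpace
from emukit.model_wrappers import GPyModelWrapper

space = ParameterSpace([ContinuousParameter('x', 0, 1)])
x_init = np.random.rand(5, 1)
y_init = np.sin(3 * x_init)   # illustrative objective observations
cost_init = np.exp(x_init)    # illustrative (positive) evaluation costs

# Both models are trained on the same inputs; otherwise __init__ raises ValueError
model_objective = GPyModelWrapper(GPy.models.GPRegression(x_init, y_init))
model_cost = GPyModelWrapper(GPy.models.GPRegression(x_init, cost_init))

loop = CostSensitiveBayesianOptimizationLoop(space, model_objective, model_cost)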

github amzn / emukit / emukit / bayesian_optimization / loops / bayesian_optimization_loop.py
import numpy as np

from ...core.acquisition import Acquisition
from ...core.interfaces import IDifferentiable, IModel
from ...core.loop import FixedIntervalUpdater, OuterLoop, SequentialPointCalculator
from ...core.loop.loop_state import create_loop_state, LoopState
from ...core.optimization import AcquisitionOptimizer
from ...core.parameter_space import ParameterSpace
from ..acquisitions import ExpectedImprovement
from ..acquisitions.log_acquisition import LogAcquisition
from ..local_penalization_calculator import LocalPenalizationPointCalculator


class BayesianOptimizationLoop(OuterLoop):
    def __init__(self, space: ParameterSpace, model: IModel, acquisition: Acquisition = None, update_interval: int = 1,
                 batch_size: int = 1):

        """
        Emukit class that implements a loop for building modular Bayesian optimization.

        :param space: Input space where the optimization is carried out.
        :param model: The model that approximates the underlying function
        :param acquisition: The acquisition function that will be used to collect new points (default, EI). If batch
                            size is greater than one, this acquisition must output positive values only.
        :param update_interval: Number of iterations between optimization of model hyper-parameters. Defaults to 1.
        :param batch_size: How many points to evaluate in one iteration of the optimization loop. Defaults to 1.
        """

        self.model = model
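
A minimal usage sketch for this class, with an illustrative objective and initial design, importing from the module path shown above:

import numpy as np
import GPy

from emukit.bayesian_optimization.loops.bayesian_optimization_loop import BayesianOptimizationLoop
from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.loop import UserFunctionWrapper
from emukit.model_wrappers import GPyModelWrapper


def f(x):
    return np.square(x - 0.3)  # illustrative objective to minimise


space = ParameterSpace([ContinuousParameter('x', 0, 1)])
x_init = np.random.rand(5, 1)
y_init = f(x_init)

model = GPyModelWrapper(GPy.models.GPRegression(x_init, y_init))

loop = BayesianOptimizationLoop(space=space, model=model)  # ExpectedImprovement by default
loop.run_loop(UserFunctionWrapper(f), 10)

# All evaluated inputs and outputs, including the initial design, live on the loop state
print(loop.loop_state.X, loop.loop_state.Y)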

github amzn / emukit / emukit / quadrature / loop / quadrature_loop.py
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0


from emukit.core.loop.loop_state import create_loop_state
from emukit.core.loop import OuterLoop, SequentialPointCalculator, FixedIntervalUpdater, ModelUpdater
from emukit.core.optimization import AcquisitionOptimizer
from emukit.core.parameter_space import ParameterSpace
from emukit.core.acquisition import Acquisition
from emukit.quadrature.methods import VanillaBayesianQuadrature
from emukit.quadrature.acquisitions import IntegralVarianceReduction


class VanillaBayesianQuadratureLoop(OuterLoop):
    def __init__(self, model: VanillaBayesianQuadrature, acquisition: Acquisition = None,
                 model_updater: ModelUpdater = None):
        """
        The loop for vanilla Bayesian Quadrature

        :param model: the vanilla Bayesian quadrature method
        :param acquisition: The acquisition function that is used to collect new points.
                            Defaults to IntegralVarianceReduction.
        :param model_updater: Defines how and when the quadrature model is updated if new data arrives.
                              Defaults to updating hyper-parameters every iteration.
        """

        if acquisition is None:
            acquisition = IntegralVarianceReduction(model)

        if model_updater is None:

github amzn / emukit / emukit / examples / gp_bayesian_optimization / optimization_loops.py
    # Create acquisition
    if acquisition_type is AcquisitionType.EI:
        acquisition = ExpectedImprovement(model)
    elif acquisition_type is AcquisitionType.PI:
        acquisition = ProbabilityOfImprovement(model)
    elif acquisition_type is AcquisitionType.NLCB:
        acquisition = NegativeLowerConfidenceBound(model)
    else:
        raise ValueError('Unrecognised acquisition type: ' + str(acquisition_type))

    acquisition_optimizer = AcquisitionOptimizer(parameter_space)
    candidate_point_calculator = SequentialPointCalculator(acquisition, acquisition_optimizer)
    loop_state = create_loop_state(x_init, y_init)
    model_updater = FixedIntervalUpdater(model, 1)
    return OuterLoop(candidate_point_calculator, model_updater, loop_state)
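
The function above only assembles and returns the OuterLoop; the caller still has to drive it. Below is a self-contained sketch of the same wiring for the EI branch, run with an explicit stopping condition; the objective, the bounds and the iteration budget are illustrative assumptions:

import numpy as np
import GPy

from emukit.bayesian_optimization.acquisitions import ExpectedImprovement
from emukit.core import ContinuousParameter, ParameterSpace
from emukit.core.loop import (FixedIntervalUpdater, FixedIterationsStoppingCondition, OuterLoop,
                              SequentialPointCalculator, UserFunctionWrapper)
from emukit.core.loop.loop_state import create_loop_state
from emukit.core.optimization import AcquisitionOptimizer
from emukit.model_wrappers import GPyModelWrapper


def f(x):
    return np.square(x - 0.5)  # illustrative objective


parameter_space = ParameterSpace([ContinuousParameter('x', 0, 1)])
x_init = np.random.rand(5, 1)
y_init = f(x_init)

model = GPyModelWrapper(GPy.models.GPRegression(x_init, y_init))

# Same wiring as the snippet above, using the EI branch
acquisition = ExpectedImprovement(model)
acquisition_optimizer = AcquisitionOptimizer(parameter_space)
candidate_point_calculator = SequentialPointCalculator(acquisition, acquisition_optimizer)
loop_state = create_loop_state(x_init, y_init)
model_updater = FixedIntervalUpdater(model, 1)
loop = OuterLoop(candidate_point_calculator, model_updater, loop_state)

# Drive the loop until the stopping condition fires
stopping_condition = FixedIterationsStoppingCondition(10)
loop.run_loop(UserFunctionWrapper(f), stopping_condition)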