How to use the emukit.core.continuous_parameter.ContinuousParameter class in emukit

To help you get started, we’ve selected a few emukit examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github amzn / emukit / tests / emukit / bayesian_optimization / test_bayesian_optimization_loop.py View on Github external
def test_batch_loop_fails_without_gradients_implemented():
    """Requesting a batch from a model without gradients must raise ValueError."""
    space = ParameterSpace([ContinuousParameter('x', 0, 1)])

    # Auto-specced mock implements the IModel interface but no gradient methods
    surrogate = mock.create_autospec(IModel)

    acquisition = ExpectedImprovement(surrogate)
    requested_batch = 10

    with pytest.raises(ValueError):
        BayesianOptimizationLoop(space, surrogate, acquisition, requested_batch)
github amzn / emukit / integration_tests / emukit / bayesian_optimization / test_single_objective_bayesian_optimization.py View on Github external
def test_loop():
    """Run GPBayesianOptimization briefly and verify the evaluation count."""
    iterations = 5

    initial_x = np.random.rand(5, 1)
    initial_y = np.random.rand(5, 1)
    variable = ContinuousParameter('x', 0, 1)

    optimizer = GPBayesianOptimization(variables_list=[variable], X=initial_x, Y=initial_y)
    optimizer.run_optimization(f, iterations)

    # Loop state holds the 5 initial points plus one point per iteration
    assert optimizer.loop_state.X.shape[0] == iterations + 5
github amzn / emukit / tests / emukit / experimental_design / test_experimental_design_loop.py View on Github external
def test_loop():
    """Experimental-design loop collects exactly one point per iteration.

    Fits a GPy regression model on 5 random initial points, runs the loop
    for ``n_iterations`` steps, and checks the loop state grew accordingly.
    """
    n_iterations = 5

    x_init = np.random.rand(5, 1)
    y_init = np.random.rand(5, 1)

    # Make GPy model
    gpy_model = GPy.models.GPRegression(x_init, y_init)
    model = GPyModelWrapper(gpy_model)

    space = ParameterSpace([ContinuousParameter('x', 0, 1)])
    acquisition = ModelVariance(model)

    # Make loop and collect points
    exp_design = ExperimentalDesignLoop(space, model, acquisition)
    exp_design.run_loop(UserFunctionWrapper(f), FixedIterationsStoppingCondition(n_iterations))

    # Check we got the correct number of points: 5 initial plus one per
    # iteration. (Was a hard-coded 10; written in terms of n_iterations for
    # consistency with the analogous Bayesian-optimization loop test.)
    assert exp_design.loop_state.X.shape[0] == n_iterations + 5
github amzn / emukit / tests / emukit / bayesian_optimization / test_bayesian_optimization_loop.py View on Github external
def test_loop():
    """BO loop smoke test: point count and result shapes after a short run."""
    num_steps = 5
    initial_x = np.random.rand(5, 1)
    initial_y = np.random.rand(5, 1)

    # GPy regression model wrapped for emukit
    wrapped_model = GPyModelWrapper(GPy.models.GPRegression(initial_x, initial_y))

    search_space = ParameterSpace([ContinuousParameter('x', 0, 1)])
    ei = ExpectedImprovement(wrapped_model)

    # Run the loop for a fixed number of iterations
    loop = BayesianOptimizationLoop(model=wrapped_model, space=search_space, acquisition=ei)
    loop.run_loop(UserFunctionWrapper(f), FixedIterationsStoppingCondition(num_steps))

    # One new point per iteration on top of the 5 initial points
    assert loop.loop_state.X.shape[0] == num_steps + 5

    # Results expose the minimum location and the per-iteration best values
    results = loop.get_results()

    assert results.minimum_location.shape[0] == 1
    assert results.best_found_value_per_iteration.shape[0] == num_steps + 5
github amzn / emukit / tests / emukit / experimental_design / test_experimental_design_loop.py View on Github external
def test_loop_initial_state():
    """Loop state must start out equal to the supplied training data."""
    initial_x = np.random.rand(5, 1)
    initial_y = np.random.rand(5, 1)

    wrapped = GPyModelWrapper(GPy.models.GPRegression(initial_x, initial_y))
    search_space = ParameterSpace([ContinuousParameter('x', 0, 1)])

    loop = ExperimentalDesignLoop(search_space, wrapped)

    # test loop state initialization
    assert_array_equal(loop.loop_state.X, initial_x)
    assert_array_equal(loop.loop_state.Y, initial_y)
github amzn / emukit / tests / emukit / quadrature / test_vanilla_bq.py View on Github external
def test_vanilla_bq_model():
    """VanillaBayesianQuadrature exposes the data and bounds of its base GP."""
    train_x = np.random.rand(5, 2)
    train_y = np.random.rand(5, 1)

    # Build the mock hierarchy bottom-up: parameter -> bounds -> kernel -> GP
    fake_param = mock.create_autospec(ContinuousParameter)
    fake_bounds = mock.create_autospec(IntegralBounds)
    fake_bounds.convert_to_list_of_continuous_parameters.return_value = 2 * [fake_param]
    fake_kernel = mock.create_autospec(QuadratureKernel, integral_bounds=fake_bounds)
    fake_gp = mock.create_autospec(IBaseGaussianProcess, kern=fake_kernel, X=train_x, Y=train_y)

    method = VanillaBayesianQuadrature(base_gp=fake_gp)

    assert_array_equal(method.X, train_x)
    assert_array_equal(method.Y, train_y)
    # we assert this to make sure that integral bounds in the kernel match, since that is where the integration happens.
    # the test is restrictive but easier to do than behavioral test when integral bounds are changed.
    assert method.integral_bounds == fake_bounds
    assert method.integral_parameters == 2 * [fake_param]
github amzn / emukit / emukit / quadrature / kernels / integral_bounds.py View on Github external
def convert_to_list_of_continuous_parameters(self) -> List[ContinuousParameter]:
        """
        Converts the integral bounds into a list of ContinuousParameter objects.

        :return: a list of ContinuousParameter objects (one for each dimension)
        """
        # One parameter per dimension: named "<name>_<i>" and bounded by that
        # dimension's (lower, upper) pair taken from self.bounds.
        return [
            ContinuousParameter(name=self.name + '_' + str(i), min_value=lb_d, max_value=ub_d)
            for i, (lb_d, ub_d) in enumerate(self.bounds)
        ]
github amzn / emukit / emukit / core / categorical_parameter.py View on Github external
def __init__(self, name: str, encoding: Encoding):
        """
        :param name: name of the parameter
        :param encoding: encoding mapping category values to continuous columns
        """
        self.name = name

        # ensure float just in case we were given integers
        # NOTE(review): no conversion actually happens here; the encoding is
        # stored as given, and self.encodings is presumably a property derived
        # from self.encoding — confirm outside this view.
        self.encoding = encoding

        # One continuous sub-parameter per encoding column, bounded by that
        # column's observed min/max values.
        self._cont_params = [
            ContinuousParameter(name + '_' + str(col),
                                np.min(self.encodings[:, col]),
                                np.max(self.encodings[:, col]))
            for col in range(self.encodings.shape[1])
        ]