How to use the botorch.models.gp_regression.FixedNoiseGP function in botorch

To help you get started, we’ve selected a few botorch examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

github.com/pytorch/botorch — test/models/test_model_list_gp_regression.py (view on GitHub, external link)
batch_shape=torch.Size(), num_outputs=1, n=10, **tkwargs
    )
    train_x2, train_y2 = _get_random_data(
        batch_shape=torch.Size(), num_outputs=1, n=11, **tkwargs
    )
    octfs = [Standardize(m=1), Standardize(m=1)] if use_octf else [None, None]
    if fixed_noise:
        train_y1_var = 0.1 + 0.1 * torch.rand_like(train_y1, **tkwargs)
        train_y2_var = 0.1 + 0.1 * torch.rand_like(train_y2, **tkwargs)
        model1 = FixedNoiseGP(
            train_X=train_x1,
            train_Y=train_y1,
            train_Yvar=train_y1_var,
            outcome_transform=octfs[0],
        )
        model2 = FixedNoiseGP(
            train_X=train_x2,
            train_Y=train_y2,
            train_Yvar=train_y2_var,
            outcome_transform=octfs[1],
        )
    else:
        model1 = SingleTaskGP(
            train_X=train_x1, train_Y=train_y1, outcome_transform=octfs[0]
        )
        model2 = SingleTaskGP(
            train_X=train_x2, train_Y=train_y2, outcome_transform=octfs[1]
        )
    model = ModelListGP(model1, model2)
    return model.to(**tkwargs)
github.com/pytorch/botorch — test/models/test_gp_regression_fidelity.py (view on GitHub, external link)
batch_shape=batch_shape,
                    m=m,
                    lin_truncated=lin_trunc,
                    **tkwargs,
                )
                # evaluate model
                model.posterior(torch.rand(torch.Size([4, num_dim]), **tkwargs))
                # test condition_on_observations
                fant_shape = torch.Size([2])
                # fantasize at different input points
                X_fant, Y_fant = _get_random_data_with_fidelity(
                    fant_shape + batch_shape, m, n_fidelity=n_fidelity, n=3, **tkwargs
                )
                c_kwargs = (
                    {"noise": torch.full_like(Y_fant, 0.01)}
                    if isinstance(model, FixedNoiseGP)
                    else {}
                )
                cm = model.condition_on_observations(X_fant, Y_fant, **c_kwargs)
                # fantasize at different same input points
                c_kwargs_same_inputs = (
                    {"noise": torch.full_like(Y_fant[0], 0.01)}
                    if isinstance(model, FixedNoiseGP)
                    else {}
                )
                cm_same_inputs = model.condition_on_observations(
                    X_fant[0], Y_fant, **c_kwargs_same_inputs
                )

                test_Xs = [
                    # test broadcasting single input across fantasy and
                    # model batches
github.com/pytorch/botorch — test/models/test_model_list_gp_regression.py (view on GitHub, external link)
def _get_model(n, fixed_noise=False, **tkwargs):
    """Build a two-output ``ModelListGP`` test fixture on random data.

    Args:
        n: Number of training points requested from ``_get_random_data``.
        fixed_noise: If True, each sub-model is a ``FixedNoiseGP`` with
            observation variances drawn uniformly from [0.1, 0.2); otherwise
            each sub-model is a ``SingleTaskGP`` with inferred noise.
        **tkwargs: Tensor keyword arguments (e.g. ``device``/``dtype``)
            forwarded to the data generator, the variance draws, and the
            final ``.to()`` call.

    Returns:
        A ``ModelListGP`` wrapping the two sub-models, moved via ``.to()``.
    """
    train_x1, train_x2, train_y1, train_y2 = _get_random_data(n=n, **tkwargs)
    submodels = []
    # Build the two sub-models in order so any random draws match the
    # (x1, y1) then (x2, y2) sequence.
    for train_x, train_y in ((train_x1, train_y1), (train_x2, train_y2)):
        if fixed_noise:
            train_yvar = 0.1 + 0.1 * torch.rand_like(train_y, **tkwargs)
            submodels.append(
                FixedNoiseGP(
                    train_X=train_x, train_Y=train_y, train_Yvar=train_yvar
                )
            )
        else:
            submodels.append(SingleTaskGP(train_X=train_x, train_Y=train_y))
    return ModelListGP(*submodels).to(**tkwargs)
github.com/pytorch/botorch — test/models/test_model_list_gp_regression.py (view on GitHub, external link)
def _get_model(n, fixed_noise=False, **tkwargs):
    """Return a ``ModelListGP`` of two GPs fit-ready on random training data.

    Args:
        n: Number of training points passed through to ``_get_random_data``.
        fixed_noise: When True the sub-models are ``FixedNoiseGP`` instances
            whose per-point variances are sampled from [0.1, 0.2); when
            False they are ``SingleTaskGP`` instances.
        **tkwargs: Tensor construction kwargs (``device``/``dtype``) used for
            data generation and the closing ``.to()`` conversion.

    Returns:
        The assembled ``ModelListGP`` after ``.to(**tkwargs)``.
    """
    train_x1, train_x2, train_y1, train_y2 = _get_random_data(n=n, **tkwargs)
    if fixed_noise:
        # Sample variances for y1 first, then y2, preserving RNG order.
        yvar1 = 0.1 + 0.1 * torch.rand_like(train_y1, **tkwargs)
        yvar2 = 0.1 + 0.1 * torch.rand_like(train_y2, **tkwargs)
        m1 = FixedNoiseGP(train_X=train_x1, train_Y=train_y1, train_Yvar=yvar1)
        m2 = FixedNoiseGP(train_X=train_x2, train_Y=train_y2, train_Yvar=yvar2)
    else:
        m1 = SingleTaskGP(train_X=train_x1, train_Y=train_y1)
        m2 = SingleTaskGP(train_X=train_x2, train_Y=train_y2)
    return ModelListGP(m1, m2).to(**tkwargs)
github.com/pytorch/botorch — test/models/test_model_list_gp_regression.py (view on GitHub, external link)
def _get_model(n, fixed_noise=False, use_octf=False, **tkwargs):
    train_x1, train_y1 = _get_random_data(
        batch_shape=torch.Size(), num_outputs=1, n=10, **tkwargs
    )
    train_x2, train_y2 = _get_random_data(
        batch_shape=torch.Size(), num_outputs=1, n=11, **tkwargs
    )
    octfs = [Standardize(m=1), Standardize(m=1)] if use_octf else [None, None]
    if fixed_noise:
        train_y1_var = 0.1 + 0.1 * torch.rand_like(train_y1, **tkwargs)
        train_y2_var = 0.1 + 0.1 * torch.rand_like(train_y2, **tkwargs)
        model1 = FixedNoiseGP(
            train_X=train_x1,
            train_Y=train_y1,
            train_Yvar=train_y1_var,
            outcome_transform=octfs[0],
        )
        model2 = FixedNoiseGP(
            train_X=train_x2,
            train_Y=train_y2,
            train_Yvar=train_y2_var,
            outcome_transform=octfs[1],
        )
    else:
        model1 = SingleTaskGP(
            train_X=train_x1, train_Y=train_y1, outcome_transform=octfs[0]
        )
        model2 = SingleTaskGP(
github.com/pytorch/botorch — botorch/acquisition/analytic.py (view on GitHub, external link)
X_observed: Tensor,
        num_fantasies: int = 20,
        maximize: bool = True,
    ) -> None:
        r"""Single-outcome Noisy Expected Improvement (via fantasies).

        Args:
            model: A fitted single-outcome model.
            X_observed: A `n x d` Tensor of observed points that are likely to
                be the best observed points so far.
            num_fantasies: The number of fantasies to generate. The higher this
                number the more accurate the model (at the expense of model
                complexity and performance).
            maximize: If True, consider the problem a maximization problem.
        """
        if not isinstance(model, FixedNoiseGP):
            raise UnsupportedError(
                "Only FixedNoiseGPs are currently supported for fantasy NEI"
            )
        # sample fantasies
        with torch.no_grad():
            posterior = model.posterior(X=X_observed)
            sampler = SobolQMCNormalSampler(num_fantasies)
            Y_fantasized = sampler(posterior).squeeze(-1)
        batch_X_observed = X_observed.expand(num_fantasies, *X_observed.shape)
        # The fantasy model will operate in batch mode
        fantasy_model = _get_noiseless_fantasy_model(
            model=model, batch_X_observed=batch_X_observed, Y_fantasized=Y_fantasized
        )

        if maximize:
            best_f = Y_fantasized.max(dim=-1)[0]