How to use the nevergrad.instrumentation.var module in nevergrad

To help you get started, we've selected a few nevergrad examples based on popular ways nevergrad.instrumentation.var is used in public projects.

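Before the project examples below, here is a minimal end-to-end sketch of the pattern they all share. It assumes the legacy instrumentation API (nevergrad 0.2-0.3, where nevergrad.instrumentation.var lived, with the 0.3-era candidate-based ask/tell interface; current releases replace all of this with nevergrad.p parameters), and the toy loss function is ours, not from any of the projects.

import numpy as np
from nevergrad import instrumentation as inst
from nevergrad.optimization import optimizerlib

# Describe the search space: one bounded 2-vector plus one keyword choice.
instrumentation = inst.Instrumentation(
    inst.var.Array(2).bounded(-5, 5, transform="arctan"),
    lr=inst.var.OrderedDiscrete([1e-3, 1e-2, 1e-1]),
)

def loss(x, lr):
    # Toy objective: quadratic in x, small penalty on the learning rate.
    return float(np.sum(x ** 2)) + lr

opt = optimizerlib.OnePlusOne(instrumentation, budget=100)
for _ in range(opt.budget):
    candidate = opt.ask()  # candidate.args / candidate.kwargs hold real arguments
    opt.tell(candidate, loss(*candidate.args, **candidate.kwargs))
print(opt.provide_recommendation().args)
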

github facebookresearch / nevergrad / nevergrad / functions / photonics / core.py
    Parameters
    ----------
    name: str
        problem name, among bragg, chirped and morpho
    dimension: int
        size of the problem among 16, 40 and 60 (morpho) or 80 (bragg and chirped)
    transform: str
        transform type for the bounding ("arctan", "tanh" or "clipping", see `Array.bounded`)

    Returns
    -------
    Instrumentation
        the instrumentation for the problem
    """
    assert not dimension % 4, f"points length should be a multiple of 4, got {dimension}"
    n = dimension // 4
    arrays: List[inst.var.Array] = []
    if name == "bragg":
        # n multiple of 2, from 16 to 80
        # domain (n=60): [2,3]^30 x [0,300]^30
        arrays.extend([inst.var.Array(n).bounded(2, 3, transform=transform) for _ in range(2)])
        arrays.extend([inst.var.Array(n).bounded(0, 300, transform=transform) for _ in range(2)])
    elif name == "chirped":
        # n multiple of 2, from 10 to 80
        # domain (n=60): [0,300]^60
        arrays = [inst.var.Array(n).bounded(0, 300, transform=transform) for _ in range(4)]
    elif name == "morpho":
        # n multiple of 4, from 16 to 60
        # domain (n=60): [0,300]^15 x [0,600]^15 x [30,600]^15 x [0,300]^15
        arrays.extend([inst.var.Array(n).bounded(0, 300, transform=transform),
                       inst.var.Array(n).bounded(0, 600, transform=transform),
                       inst.var.Array(n).bounded(30, 600, transform=transform),
                       inst.var.Array(n).bounded(0, 300, transform=transform)])
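
Each branch composes several bounded Array variables, one per block of physical quantities, which are then merged into a single Instrumentation with one positional argument per array. A sketch of the morpho branch built and inspected standalone, under the same legacy-API assumptions as above (the final inst.Instrumentation(*arrays) call mirrors the other examples on this page; the actual tail of core.py is truncated in the excerpt):

from typing import List
from nevergrad import instrumentation as inst

dimension, transform = 16, "tanh"
n = dimension // 4
arrays: List[inst.var.Array] = [
    inst.var.Array(n).bounded(0, 300, transform=transform),
    inst.var.Array(n).bounded(0, 600, transform=transform),
    inst.var.Array(n).bounded(30, 600, transform=transform),
    inst.var.Array(n).bounded(0, 300, transform=transform),
]
instrumentation = inst.Instrumentation(*arrays)
assert instrumentation.dimension == dimension  # bounding transforms keep the size
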
github facebookresearch / nevergrad / nevergrad / optimization / test_optimizerlib.py
def test_bo_instrumentation_and_parameters() -> None:
    # instrumentation
    instrumentation = inst.Instrumentation(inst.var.SoftmaxCategorical([True, False]))
    with pytest.warns(base.InefficientSettingsWarning):
        optlib.QRBO(instrumentation, budget=10)
    with pytest.warns(None) as record:
        opt = optlib.ParametrizedBO(gp_parameters={"alpha": 1})(instrumentation, budget=10)
    assert not record, record.list  # no warning
    # parameters
    # make sure underlying BO optimizer gets instantiated correctly
    opt.tell(opt.create_candidate.from_call(True), 0.0)
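
SoftmaxCategorical encodes a choice as one softmax weight per option, so the boolean choice above contributes two dimensions to the standardized space, which is the kind of setting the test expects BO to warn about. The from_call helper on the last line is useful beyond tests: it builds a candidate from real argument values, so you can seed any optimizer with a known-good point instead of raw data. A sketch, under the same legacy-API assumptions:

from nevergrad import instrumentation as inst
from nevergrad.optimization import optimizerlib as optlib

instrumentation = inst.Instrumentation(inst.var.SoftmaxCategorical([True, False]))
opt = optlib.TwoPointsDE(instrumentation, budget=20)
candidate = opt.create_candidate.from_call(True)  # candidate built from arguments
opt.tell(candidate, 0.0)  # inject a known evaluation before optimizing further
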
github facebookresearch / nevergrad / nevergrad / functions / games / game.py
def __init__(self, game: str = "war") -> None:
        self.game = game
        self.game_object = _Game()
        the_dimension = self.game_object.play_game(self.game) * 2  # times 2 because we consider both players separately.
        instrumentation = Instrumentation(inst.var.Array(the_dimension))
        super().__init__(self._simulate_game, instrumentation)
        self.instrumentation.probably_noisy = True
        self.instrumentation.scrambled = True
        self._descriptors.update(game=game)
github facebookresearch / nevergrad / nevergrad / functions / games / game.py
def __init__(self, game: str = "war") -> None:
        self.game = game
        self.game_object = _Game()
        the_dimension = self.game_object.play_game(self.game) * 2  # times 2 because we consider both players separately.
        instrumentation = Instrumentation(inst.var.Array(the_dimension))
        super().__init__(self._simulate_game, instrumentation)
        self.instrumentation.probably_noisy = True
        self.instrumentation.is_nonmetrizable = game in ["war", "batawaf"]
        self._descriptors.update(game=game)
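
Both constructors wrap the two players' flattened policies in a single unconstrained Array, then set descriptor flags (probably_noisy, scrambled, is_nonmetrizable) that benchmark tooling can read. A stripped-down sketch of the same wiring with a stand-in stochastic objective (_Game and _simulate_game are internal to nevergrad; noisy_loss and the policy length are hypothetical):

import numpy as np
from nevergrad import instrumentation as inst

the_dimension = 2 * 30  # hypothetical policy length of 30 per player, two players
instrumentation = inst.Instrumentation(inst.var.Array(the_dimension))
instrumentation.probably_noisy = True  # same descriptor flag as in the snippet

def noisy_loss(policies):
    # Stand-in for _simulate_game: game outcomes are stochastic.
    return float(np.sum(policies ** 2)) + np.random.normal(scale=0.1)
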
github facebookresearch / nevergrad / nevergrad / functions / rl / agents.py
def __init__(self, module: nn.Module,
                 deterministic: bool = True,
                 instrumentation_std: float = 0.1) -> None:
        super().__init__()
        self.deterministic = deterministic
        self.module = module
        kwargs = {
            name: inst.var.Array(*value.shape).affined(a=instrumentation_std).bounded(-10, 10, transform="arctan")
            for name, value in module.state_dict().items()  # type: ignore
        }  # bounded to avoid overflows
        self.instrumentation = inst.Instrumentation(**kwargs)
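
Every tensor in the torch module's state_dict becomes a keyword Array variable of the same shape; affined(a=std) scales the standardized samples down, and the arctan bounding keeps the perturbed weights finite. The same idea on a plain dict of shapes, with no torch dependency (the names and shapes are illustrative):

from nevergrad import instrumentation as inst

instrumentation_std = 0.1
shapes = {"fc1.weight": (4, 3), "fc1.bias": (4,)}  # hypothetical module shapes
kwargs = {
    name: inst.var.Array(*shape).affined(a=instrumentation_std).bounded(-10, 10, transform="arctan")
    for name, shape in shapes.items()
}
instrumentation = inst.Instrumentation(**kwargs)
# candidate.kwargs now maps each parameter name to a proposed ndarray.
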
github facebookresearch / nevergrad / nevergrad / optimization / base.py
def __init__(self, instrumentation: Union[instru.Instrumentation, int], budget: Optional[int] = None, num_workers: int = 1) -> None:
        if self.no_parallelization and num_workers > 1:
            raise ValueError(f"{self.__class__.__name__} does not support parallelization")
        # "seedable" random state: externally setting the seed will provide deterministic behavior
        # you can also replace or reinitialize this random state
        self.num_workers = int(num_workers)
        self.budget = budget
        # How do we deal with cheap constraints, i.e. constraints which are fast to compute and use few resources?
        # True ==> we penalize violations (infinite loss for candidates which violate the constraint).
        # False ==> we repeat the ask until the constraint is satisfied.
        self._penalize_cheap_violations = False
        self.instrumentation = (
            instrumentation
            if isinstance(instrumentation, instru.Instrumentation)
            else instru.Instrumentation(instru.var.Array(instrumentation))
        )
        if not self.dimension:
            raise ValueError("No variable to optimize in this instrumentation.")
        self.create_candidate = CandidateMaker(self.instrumentation)
        self.name = self.__class__.__name__  # printed name in repr
        # keep a record of evaluations, and current bests which are updated at each new evaluation
        self.archive: utils.Archive[utils.Value] = utils.Archive()  # dict like structure taking np.ndarray as keys and Value as values
        self.current_bests = {
            x: utils.Point(np.zeros(self.dimension, dtype=float), utils.Value(np.inf)) for x in ["optimistic", "pessimistic", "average"]
        }
        # pruning function, called at each "tell"
        # this can be deactivated or modified by each implementation
        self.pruning: Optional[Callable[[utils.Archive[utils.Value]], utils.Archive[utils.Value]]] = utils.Pruning.sensible_default(
            num_workers=num_workers, dimension=self.instrumentation.dimension
        )
        # instance state
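
One consequence of this constructor is the shorthand visible near the top: passing a plain int as instrumentation is equivalent to a single unconstrained Array of that size. A sketch (legacy API assumed):

from nevergrad import instrumentation as inst
from nevergrad.optimization import optimizerlib

# These two declarations describe the same 5-dimensional continuous search space:
opt_a = optimizerlib.OnePlusOne(5, budget=50)
opt_b = optimizerlib.OnePlusOne(inst.Instrumentation(inst.var.Array(5)), budget=50)
assert opt_a.dimension == opt_b.dimension == 5
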
github uber / bayesmark / example_opt_root / nevergrad_optimizer.py
            param_type = param_config["type"]

            param_space = param_config.get("space", None)
            param_range = param_config.get("range", None)
            param_values = param_config.get("values", None)

            prewarp = None
            if param_type == "cat":
                assert param_space is None
                assert param_range is None
                arg = inst.var.SoftmaxCategorical(param_values)
            elif param_type == "bool":
                assert param_space is None
                assert param_range is None
                assert param_values is None
                arg = inst.var.OrderedDiscrete([False, True])
            elif param_values is not None:
                assert param_type in ("int", "ordinal", "real")
                arg = inst.var.OrderedDiscrete(param_values)
                # We are throwing away information here, but OrderedDiscrete
                # appears to be invariant to monotonic transformation anyway.
            elif param_type == "int":
                assert param_values is None
                # Need +1 since the API is inclusive
                choices = range(int(param_range[0]), int(param_range[-1]) + 1)
                arg = inst.var.OrderedDiscrete(choices)
                # We are throwing away information here, but OrderedDiscrete
                # appears to be invariant to monotonic transformation anyway.
            elif param_type == "real":
                assert param_values is None
                assert param_range is not None
                # Will need to warp to this space separately.
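
In the int branch above, the adapter simply enumerates every value in the inclusive range and hands it to OrderedDiscrete, which preserves order but not spacing. A sketch of the resulting variable for a hypothetical range of [1, 8]:

from nevergrad import instrumentation as inst

param_range = (1, 8)  # hypothetical inclusive integer range
choices = range(int(param_range[0]), int(param_range[-1]) + 1)
arg = inst.var.OrderedDiscrete(choices)  # 8 ordered choices, spacing discarded
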
github uber / bayesmark / bayesmark / builtin_opt / nevergrad_optimizer.py
            param_range = param_config.get("range", None)
            param_values = param_config.get("values", None)

            prewarp = None
            if param_type == "cat":
                assert param_space is None
                assert param_range is None
                arg = inst.var.SoftmaxCategorical(param_values)
            elif param_type == "bool":
                assert param_space is None
                assert param_range is None
                assert param_values is None
                arg = inst.var.OrderedDiscrete([False, True])
            elif param_values is not None:
                assert param_type in ("int", "ordinal", "real")
                arg = inst.var.OrderedDiscrete(param_values)
                # We are throwing away information here, but OrderedDiscrete
                # appears to be invariant to monotonic transformation anyway.
            elif param_type == "int":
                assert param_values is None
                # Need +1 since the API is inclusive
                choices = range(int(param_range[0]), int(param_range[-1]) + 1)
                arg = inst.var.OrderedDiscrete(choices)
                # We are throwing away information here, but OrderedDiscrete
                # appears to be invariant to monotonic transformation anyway.
            elif param_type == "real":
                assert param_values is None
                assert param_range is not None
                # Will need to warp to this space separately.
                arg = inst.var.Gaussian(mean=0, std=1)
                prewarp = Real(warp=param_space, range_=param_range)
            else:
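
Real-valued parameters take a different route: nevergrad only sees an unbounded unit Gaussian, and the warping into the configured space/range happens outside the optimizer in bayesmark's Real pre-warper. A sketch of that split, with a hypothetical sigmoid warp standing in for the Real class:

import numpy as np
from nevergrad import instrumentation as inst

arg = inst.var.Gaussian(mean=0, std=1)  # the optimizer samples an unbounded z

def prewarp(z, lo=0.0, hi=1.0):
    # Hypothetical stand-in for bayesmark's Real warper: squash z into [lo, hi].
    return lo + (hi - lo) / (1.0 + np.exp(-z))
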
github brian-team / brian2modelfitting / brian2modelfitting / optimizer.py
def initialize(self, parameter_names, popsize, **params):
        self.tested_parameters = []
        self.errors = []
        for param in params:
            if param not in parameter_names:
                raise ValueError("Parameter %s must be defined as a parameter "
                                 "in the model" % param)

        bounds = calc_bounds(parameter_names, **params)

        instruments = []
        for i, name in enumerate(parameter_names):
            assert len(bounds[i]) == 2
            instrumentation = inst.var.Array(1).asscalar().bounded(np.array([bounds[i][0]]),
                                                                   np.array([bounds[i][1]]))
            instruments.append(instrumentation)

        instrum = inst.Instrumentation(*instruments)
        self.optim = optimizerlib.registry[self.method](instrumentation=instrum,
                                                        **self.kwds)

        self.optim._llambda = popsize  # TODO: more elegant way once possible
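
Each model parameter becomes a one-element Array, collapsed to a float with asscalar() and clipped to its bounds, and the optimizer class is then looked up by name in optimizerlib.registry. A condensed sketch under the same legacy-API assumptions (the bounds are hypothetical, and "DE" is just one valid registry key):

import numpy as np
from nevergrad import instrumentation as inst
from nevergrad.optimization import optimizerlib

bounds = {"g_na": (10.0, 100.0), "g_k": (1.0, 30.0)}  # hypothetical parameter bounds
instruments = [
    inst.var.Array(1).asscalar().bounded(np.array([lo]), np.array([hi]))
    for lo, hi in bounds.values()
]
instrum = inst.Instrumentation(*instruments)
optim = optimizerlib.registry["DE"](instrumentation=instrum, budget=200)
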
github facebookresearch / nevergrad / nevergrad / functions / mlda / problems.py
def __init__(self, transform: Optional[str] = None) -> None:
        super().__init__(self._get_pixel_value, inst.var.Array(1).asscalar(), inst.var.Array(1).asscalar())
        self.instrumentation = self.instrumentation.with_name("standard")  # force descriptor update
        self._image = datasets.get_data("Landscape")
        if transform == "gaussian":
            variables = list(inst.var.OrderedDiscrete(list(range(x))) for x in self._image.shape)
            self.instrumentation = inst.Instrumentation(*variables).with_name("gaussian")
        elif transform == "square":
            stds = (np.array(self._image.shape) - 1.) / 2.
            variables2 = list(inst.var.Gaussian(s, s) for s in stds)
            self.instrumentation = inst.Instrumentation(*variables2).with_name("square")  # maybe buggy, try again?
        elif transform is not None:
            raise ValueError(f"Unknown transform {transform}")
        self._max = float(self._image.max())
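
The net effect is one objective exposed under three interchangeable instrumentations: two scalar Arrays ("standard"), one OrderedDiscrete index per image axis ("gaussian"), or one Gaussian per axis centered on the image midpoint ("square"), with with_name keeping the benchmark descriptors apart. A side-by-side sketch for a hypothetical 100x200 image, legacy API assumed:

import numpy as np
from nevergrad import instrumentation as inst

shape = (100, 200)  # hypothetical image shape
standard = inst.Instrumentation(
    inst.var.Array(1).asscalar(), inst.var.Array(1).asscalar()).with_name("standard")
gaussian = inst.Instrumentation(
    *(inst.var.OrderedDiscrete(list(range(x))) for x in shape)).with_name("gaussian")
stds = (np.array(shape) - 1.) / 2.
square = inst.Instrumentation(
    *(inst.var.Gaussian(s, s) for s in stds)).with_name("square")
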