How to use the nevergrad.optimization.base module in nevergrad

To help you get started, we’ve selected a few nevergrad examples, based on popular ways it is used in public projects.

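The base module defines the core Optimizer ask-and-tell interface, the optimizer registry, and the shared warnings and exceptions that appear throughout the snippets below. As a quick orientation before the examples, here is a minimal ask-and-tell loop against that interface (a sketch using the instrumentation-era API these snippets target; "OnePlusOne" is just one registered optimizer name, and importing optimizerlib is assumed to populate base.registry):

import numpy as np
from nevergrad.optimization import base, optimizerlib  # noqa: F401  (importing optimizerlib fills base.registry)

def loss(x: np.ndarray) -> float:
    # toy objective: squared distance to the point [0.5, 0.5]
    return float(np.sum((x - 0.5) ** 2))

optimizer = base.registry["OnePlusOne"](instrumentation=2, budget=100)
for _ in range(optimizer.budget):
    candidate = optimizer.ask()                      # get a point to evaluate
    optimizer.tell(candidate, loss(candidate.data))  # report its loss
recommendation = optimizer.provide_recommendation()
print(recommendation.data)  # best point found, close to [0.5, 0.5]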

github facebookresearch / nevergrad / nevergrad / optimization / test_optimizerlib.py
def test_optimizers_suggest(name: str) -> None:  # pylint: disable=redefined-outer-name
    with warnings.catch_warnings():
        # tests do not need to be efficient
        warnings.simplefilter("ignore", category=base.InefficientSettingsWarning)
        optimizer = registry[name](instrumentation=4, budget=2)
        optimizer.suggest(np.array([12.0] * 4))
        candidate = optimizer.ask()
        try:
            optimizer.tell(candidate, 12)
        except base.TellNotAskedNotSupportedError:
            pass
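Two details worth noting in this test: suggest queues a user-chosen point so that the following ask() returns it as a candidate, and telling a candidate that was never asked raises base.TellNotAskedNotSupportedError on optimizers that cannot ingest arbitrary points, which the test treats as an acceptable outcome.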
github facebookresearch / nevergrad / nevergrad / optimization / test_optimizerlib.py
def check_optimizer(optimizer_cls: Union[base.OptimizerFamily, Type[base.Optimizer]], budget: int = 300, verify_value: bool = True) -> None:
    # recast optimizers do not support num_workers > 1; also honor the no_parallelization flag
    num_workers = 1 if optimizer_cls.recast or optimizer_cls.no_parallelization else 2
    num_attempts = 1 if not verify_value else 2  # allow 2 attempts to reach the optimum (stochastic runs can fail)
    optimum = [0.5, -0.8]
    if optimizer_cls in (optlib.PBIL,):
        optimum = [0, 1, 0, 1, 0, 1]
    fitness = Fitness(optimum)
    for k in range(1, num_attempts + 1):
        optimizer = optimizer_cls(instrumentation=len(optimum), budget=budget, num_workers=num_workers)
        with warnings.catch_warnings():
            # tests do not need to be efficient
            warnings.filterwarnings("ignore", category=base.InefficientSettingsWarning)
            # some optimizers finish early
            warnings.filterwarnings("ignore", category=FinishedUnderlyingOptimizerWarning)
            # now optimize :)
            candidate = optimizer.minimize(fitness)
        if verify_value and "chain" not in str(optimizer_cls):
            try:
                np.testing.assert_array_almost_equal(candidate.data, optimum, decimal=1)
            except AssertionError as e:
                print(f"Attemp #{k}: failed with best point {tuple(candidate.data)}")
                if k == num_attempts:
                    raise e
            else:
                break
    # check population queue
    if hasattr(optimizer, "population"):  # TODO add a PopBasedOptimizer
        assert len(optimizer.population._queue) == len(set(optimizer.population._queue)), "Queue has duplicated items"  # type: ignore
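A usage sketch for this helper (the optimizer names and budget here are illustrative):

from nevergrad.optimization import optimizerlib as optlib

# spot-check a few registered optimizers with a reduced budget
for name in ["OnePlusOne", "PSO", "TwoPointsDE"]:
    check_optimizer(optlib.registry[name], budget=100, verify_value=False)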
github facebookresearch / nevergrad / nevergrad / optimization / test_base.py
def test_compare(tmp_path: Path) -> None:
    optimizer = optimizerlib.CMA(instrumentation=3, budget=1000, num_workers=5)
    optimizerlib.addCompare(optimizer)
    for i in range(1000):
        x: List[Any] = []
        for j in range(6):
            x += [optimizer.ask()]
        winners = sorted(x, key=lambda x_: np.linalg.norm(x_.data - np.array((1.0, 1.0, 1.0))))
        optimizer.compare(winners[:3], winners[3:])  # type: ignore
    result = optimizer.provide_recommendation()
    print(result)
    np.testing.assert_almost_equal(result.data[0], 1., decimal=2)


class StupidFamily(base.OptimizerFamily):

    def __call__(self, instrumentation: Union[int, Instrumentation], budget: Optional[int] = None, num_workers: int = 1) -> base.Optimizer:
        class_ = base.registry["Zero"] if self._kwargs.get("zero", True) else base.registry["StupidRandom"]
        run = class_(instrumentation=instrumentation, budget=budget, num_workers=num_workers)
        run.name = self._repr
        return run


def test_optimizer_family() -> None:
    for zero in [True, False]:
        optf = StupidFamily(zero=zero)
        opt = optf(instrumentation=2, budget=4, num_workers=1)
        recom = opt.minimize(test_optimizerlib.Fitness([.5, -.8]))
        np.testing.assert_equal(recom.data == np.zeros(2), zero)
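The family instance is itself the factory: calling it with the usual optimizer signature builds a configured optimizer and stamps the family's repr onto it as its name. A short usage sketch reusing the class above:

optf = StupidFamily(zero=False)          # this family builds "StupidRandom" optimizers
opt = optf(instrumentation=2, budget=4)  # same call signature as any base.Optimizer
print(opt.name)                          # reflects the family configuration rather than the class name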
github facebookresearch / nevergrad / nevergrad / optimization / recastlib.py
def __init__(
        self, instrumentation: Union[int, base.instru.Instrumentation], budget: Optional[int] = None, num_workers: int = 1
    ) -> None:
        super().__init__(instrumentation, budget=budget, num_workers=num_workers)
        self._parameters = ScipyOptimizer()
        self.multirun = 1  # work in progress
        self.initial_guess: Optional[base.ArrayLike] = None
github facebookresearch / nevergrad / nevergrad / optimization / optimizerlib.py
def _internal_tell_not_asked(self, candidate: base.Candidate, value: float) -> None:
        raise base.TellNotAskedNotSupportedError
github facebookresearch / nevergrad / nevergrad / optimization / recaster.py
    @property
    def messages(self) -> List[Message]:
        return self._thread.messages

    def stop(self) -> None:
        self._thread.stop()

    def __del__(self) -> None:
        self.stop()  # the thread class's own __del__ is unreliable, so stop explicitly


class FinishedUnderlyingOptimizerWarning(Warning):
    pass


class RecastOptimizer(base.Optimizer):
    """Base class for ask and tell optimizer derived from implementations with no ask and tell interface.
    The underlying optimizer implementation is a function which is supposed to call directly the function
    to optimize. It is tricked into optimizing a "fake" function in a thread:
    - calls to the fake functions are returned by the "ask()" interface
    - return values of the fake functions are provided to the thread when calling "tell(x, value)"

    Note
    ----
    These implementations are not necessarily robust. More specifically, one cannot "tell" any
    point which was not "asked" before.
    """

    recast = True

    def __init__(self, instrumentation: Union[int, Instrumentation], budget: Optional[int] = None, num_workers: int = 1) -> None:
        super().__init__(instrumentation, budget, num_workers=num_workers)
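The mechanism is easier to see stripped down. Below is a toy sketch of the trick, not nevergrad's actual recaster machinery: the blocking optimizer runs in a worker thread against a fake objective that hands each argument to ask() and blocks until tell() supplies the corresponding value.

import queue
import threading

class ToyRecast:
    """Minimal illustration of the recast pattern (not the real implementation)."""

    def __init__(self, blocking_optimizer):
        self._asked: queue.Queue = queue.Queue()  # points the optimizer wants evaluated
        self._told: queue.Queue = queue.Queue()   # objective values we feed back
        self._thread = threading.Thread(target=blocking_optimizer, args=(self._fake_objective,), daemon=True)
        self._thread.start()

    def _fake_objective(self, x):
        self._asked.put(x)       # surface the point through ask()
        return self._told.get()  # block until tell() provides its value

    def ask(self):
        return self._asked.get()

    def tell(self, value):
        self._told.put(value)

Because values travel through the paired queues in lock-step, only points that came out of the fake objective, i.e. points that were asked, can meaningfully be told, which is exactly the caveat stated in the docstring.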
github facebookresearch / nevergrad / nevergrad / optimization / optimizerlib.py
def __init__(self, instrumentation: Union[int, Instrumentation], budget: Optional[int] = None, num_workers: int = 1) -> None:
        super().__init__(instrumentation, budget=budget, num_workers=num_workers)
        self.sigma = 1
        self.mu = self.dimension
        self.llambda = 4 * self.dimension
        if num_workers is not None:
            self.llambda = max(self.llambda, num_workers)
        self.current_center: np.ndarray = np.zeros(self.dimension)
        self._loss_record: List[float] = []
        # population
        self._evaluated_population: List[base.utils.Individual] = []
        self._unevaluated_population: Dict[bytes, base.utils.Individual] = {}
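In this initializer, llambda (doubled "l" because lambda is a reserved word in Python) is the offspring population size: four times the dimension, raised to at least num_workers so that parallel asks can each draw a distinct individual; mu, the number of parents kept at selection, is set to the dimension.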
github facebookresearch / nevergrad / nevergrad / optimization / optimizerlib.py
            try:
                self.es.tell(self.listx, self.listy)  # pass the accumulated points to the underlying CMA instance
            except RuntimeError:
                pass
            else:
                self.listx = []
                self.listy = []

    def _internal_provide_recommendation(self) -> ArrayLike:
        if self._es is None:
            raise RuntimeError("Either ask or tell method should have been called before")
        if self.es.result.xbest is None:
            return self.current_bests["pessimistic"].x
        return self.es.result.xbest  # type: ignore


class ParametrizedCMA(base.ParametrizedFamily):
    """TODO

    Parameters
    ----------
    scale: float
        scale of the search
    diagonal: bool
    use the diagonal version of CMA (advised in high dimension)
    """

    _optimizer_class = _CMA

    def __init__(self, *, scale: float = 1.0, diagonal: bool = False) -> None:
        self.scale = scale
        self.diagonal = diagonal
        super().__init__()
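Like StupidFamily above, an instance of this family is called with the standard optimizer signature to obtain a configured _CMA optimizer (a sketch; my_loss stands in for any user-supplied callable returning a float):

# build a diagonal CMA optimizer for a 20-dimensional problem
optimizer = ParametrizedCMA(scale=1.0, diagonal=True)(instrumentation=20, budget=1000, num_workers=4)
recommendation = optimizer.minimize(my_loss)
print(recommendation.data)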
github facebookresearch / nevergrad / nevergrad / optimization / cec2019_optimizer.py
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.


from typing import Tuple, Optional, Union
import numpy as np
from . import base


@base.registry.register
class CustomOptimizer(base.Optimizer):
    """Simple but sometimes powerful optimization algorithm.

    We use the one-fifth adaptation rule, going back to Schumer and Steiglitz (1968).
    It was independently rediscovered by Devroye (1972) and Rechenberg (1973).
We use asynchronous updates, so that the (1+1) strategy can actually run in parallel
and even performs quite well in that context; this is naturally close to (1+lambda).
    """

    def __init__(
        self, instrumentation: Union[base.instru.Instrumentation, int], budget: Optional[int] = None, num_workers: int = 1
    ) -> None:
        super().__init__(instrumentation, budget=budget, num_workers=num_workers)
        self.sigma: float = 1

    def _internal_ask(self) -> base.ArrayLike:
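        # the original snippet is truncated at this point; the body below is a hedged
        # sketch of a one-fifth-rule (1+1)-ES step under this interface, not the verbatim source
        if not self.num_ask:
            return np.zeros(self.dimension)  # first ask: start from the origin
        # mutate the pessimistic current best with the adaptive step size sigma
        return self.current_bests["pessimistic"].x + self.sigma * np.random.normal(0.0, 1.0, self.dimension)

    def _internal_tell(self, x: base.ArrayLike, value: float) -> None:
        # one-fifth adaptation (sketch): widen sigma on success, narrow it on failure,
        # so that roughly one mutation in five improves on the incumbent
        if value <= self.current_bests["pessimistic"].mean:
            self.sigma *= 2.0            # success: widen the search
        else:
            self.sigma *= 2.0 ** -0.25   # failure: narrow it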