How to use the adaptive.learner.base_learner.BaseLearner class in adaptive

To help you get started, we’ve selected a few adaptive examples based on popular ways it is used in public projects.

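BaseLearner itself is the abstract base class that every learner below subclasses. As a rough orientation, here is a toy subclass that evaluates its function at successive integers. The method names (`ask`, `tell`, `tell_pending`, `loss`, `remove_unfinished`, `_get_data`, `_set_data`) follow the interface the real learners implement, but the next-integer and shrinking-loss logic is purely illustrative, and the exact set of required hooks can vary between adaptive versions.

from adaptive.learner.base_learner import BaseLearner


class MyLearner(BaseLearner):
    """Toy sketch: evaluate `function` at the integers 0, 1, 2, ..."""

    def __init__(self, function):
        self.function = function
        self.data = {}  # x -> y for completed points
        self.pending_points = set()

    def ask(self, n, tell_pending=True):
        # Propose the next `n` integers that are neither done nor pending.
        start = len(self.data) + len(self.pending_points)
        points = list(range(start, start + n))
        if tell_pending:
            for x in points:
                self.tell_pending(x)
        # The second element is a per-point "loss improvement" (illustrative).
        return points, [float("inf")] * n

    def tell(self, x, y):
        self.pending_points.discard(x)
        self.data[x] = y

    def tell_pending(self, x):
        self.pending_points.add(x)

    def loss(self, real=True):
        # Illustrative: pretend the loss shrinks with the number of points.
        return 1 / (1 + len(self.data))

    def remove_unfinished(self):
        self.pending_points = set()

    def _get_data(self):
        return self.data

    def _set_data(self, data):
        self.data = data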

Example from python-adaptive/adaptive: adaptive/learner/learnerND.py (view on GitHub)
    if transform is not None:
        point = np.linalg.solve(transform, point)  # undo the transform

    return point


def _simplex_evaluation_priority(key):
    # We round the loss to 8 digits so that losses that are
    # equal up to numerical precision are considered equal.
    # This is needed because we want the learner to behave
    # in a deterministic fashion.
    loss, simplex, subsimplex = key
    return -round(loss, ndigits=8), simplex, subsimplex or (0,)


class LearnerND(BaseLearner):
    """Learns and predicts a function 'f: ℝ^N → ℝ^M'.

    Parameters
    ----------
    func : callable
        The function to learn. Must take a tuple of N real
        parameters and return a real number or an array-like of length M.
    bounds : list of 2-tuples or `scipy.spatial.ConvexHull`
        A list ``[(a_1, b_1), (a_2, b_2), ..., (a_n, b_n)]`` containing bounds,
        one pair per dimension.
        Or a ConvexHull that defines the boundary of the domain.
    loss_per_simplex : callable, optional
        A function that returns the loss for a simplex.
        If not provided, then a default is used, which uses
        the deviation from a linear estimate, as well as
        triangle area, to determine the loss.
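A typical blocking run with this learner, as a sketch (the ring-shaped test function and the loss threshold are made up for illustration):

import numpy as np

import adaptive


def ring(xy):
    # A function with a sharp circular feature that uniform sampling misses.
    x, y = xy
    return x + np.exp(-((x**2 + y**2 - 0.75**2) ** 2) / 0.1**2)


learner = adaptive.LearnerND(ring, bounds=[(-1, 1), (-1, 1)])
adaptive.runner.simple(learner, goal=lambda l: l.loss() < 0.01)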

Example from python-adaptive/adaptive: adaptive/learner/learner1D.py (view on GitHub)
    xs_left[0] = None
    xs_right[-1] = None
    neighbors = {x: [x_L, x_R] for x, x_L, x_R in zip(xs, xs_left, xs_right)}
    return sortedcontainers.SortedDict(neighbors)


def _get_intervals(x, neighbors, nth_neighbors):
    nn = nth_neighbors
    i = neighbors.index(x)
    start = max(0, i - nn - 1)
    end = min(len(neighbors), i + nn + 2)
    points = neighbors.keys()[start:end]
    return list(zip(points, points[1:]))


class Learner1D(BaseLearner):
    """Learns and predicts a function 'f:ℝ → ℝ^N'.

    Parameters
    ----------
    function : callable
        The function to learn. Must take a single real parameter and
        return a real number.
    bounds : pair of reals
        The bounds of the interval on which to learn 'function'.
    loss_per_interval : callable, optional
        A function that returns the loss for a single interval of the domain.
        If not provided, then a default is used, which uses the scaled distance
        in the x-y plane as the loss. See the notes for more details.

    Attributes
    ----------
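A minimal blocking run might look like the following sketch (the peaked test function and the loss goal are arbitrary):

import adaptive


def peak(x, a=0.01):
    # A function with a narrow feature near x = 0 that a uniform
    # grid would easily miss.
    return x + a**2 / (a**2 + x**2)


learner = adaptive.Learner1D(peak, bounds=(-1, 1))
adaptive.runner.simple(learner, goal=lambda l: l.loss() < 0.01)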

Example from python-adaptive/adaptive: adaptive/learner/sequence_learner.py (view on GitHub)
    def __init__(self, function):
        self.function = function

    def __call__(self, index_point, *args, **kwargs):
        index, point = index_point
        return self.function(point, *args, **kwargs)

    def __getstate__(self):
        return self.function

    def __setstate__(self, function):
        self.__init__(function)


class SequenceLearner(BaseLearner):
    r"""A learner that will learn a sequence. It simply returns
    the points in the provided sequence when asked.

    This is useful when your problem cannot be formulated in terms of
    another adaptive learner, but you still want to use Adaptive's
    routines to run, save, and plot.

    Parameters
    ----------
    function : callable
        The function to learn. Must take a single element of `sequence`.
    sequence : sequence
        The sequence to learn.

    Attributes
    ----------
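A sketch of typical usage; using `SequenceLearner.done` as the goal follows the pattern in the adaptive documentation, while the squaring function and sequence length are arbitrary:

import adaptive


def f(x):
    return x ** 2


learner = adaptive.SequenceLearner(f, sequence=range(1000))
adaptive.runner.simple(learner, goal=adaptive.SequenceLearner.done)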

Example from python-adaptive/adaptive: adaptive/learner/learner2D.py (view on GitHub)
        c = np.setdiff1d(n.reshape(-1), tri.simplices[i])
        return np.concatenate((tri.points[c], ip.values[c]), axis=-1)

    simplices = np.concatenate(
        [tri.points[tri.simplices], ip.values[tri.simplices]], axis=-1
    )
    neighbors = [get_neighbors(i, ip) for i in range(len(tri.simplices))]

    return [
        sum(simplex_volume_in_embedding(np.vstack([simplex, n])) for n in neighbors[i])
        / len(neighbors[i])
        for i, simplex in enumerate(simplices)
    ]


class Learner2D(BaseLearner):
    """Learns and predicts a function 'f: ℝ^2 → ℝ^N'.

    Parameters
    ----------
    function : callable
        The function to learn. Must take a tuple of two real
        parameters and return a real number.
    bounds : list of 2-tuples
        A list ``[(a1, b1), (a2, b2)]`` containing bounds,
        one per dimension.
    loss_per_triangle : callable, optional
        A function that returns the loss for every triangle.
        If not provided, then a default is used, which uses
        the deviation from a linear estimate, as well as
        triangle area, to determine the loss. See the notes
        for more details.
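For instance, a sketch of a blocking run (the paraboloid and the loss goal are illustrative):

import adaptive


def paraboloid(xy):
    x, y = xy
    return x**2 + y**2


learner = adaptive.Learner2D(paraboloid, bounds=[(-1, 1), (-1, 1)])
adaptive.runner.simple(learner, goal=lambda l: l.loss() < 0.01)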

Example from python-adaptive/adaptive: adaptive/learner/average_learner.py (view on GitHub)
from math import sqrt

import numpy as np

from adaptive.learner.base_learner import BaseLearner
from adaptive.notebook_integration import ensure_holoviews
from adaptive.utils import cache_latest


class AverageLearner(BaseLearner):
    """A naive implementation of adaptive computing of averages.

    The learned function must depend on an integer input variable that
    represents the source of randomness.

    Parameters
    ----------
    atol : float
        Desired absolute tolerance.
    rtol : float
        Desired relative tolerance.

    Attributes
    ----------
    data : dict
        Sampled points and values.
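A sketch of how this learner is typically driven; the noisy function and the goal of `loss() < 1` (the loss falling below 1 roughly meaning the tolerances are met) are illustrative:

import random

import adaptive


def noisy(seed):
    # The integer argument is the source of randomness, as required.
    rng = random.Random(seed)
    return rng.gauss(1.0, 0.1)


learner = adaptive.AverageLearner(noisy, atol=None, rtol=0.01)
adaptive.runner.simple(learner, goal=lambda l: l.loss() < 1)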

Example from python-adaptive/adaptive: adaptive/learner/integrator_learner.py (view on GitHub)
        remove = self.err < (abs(self.igral) * eps * Vcond[depth])

        return force_split, remove

    def __repr__(self):
        lst = [
            f"(a, b)=({self.a:.5f}, {self.b:.5f})",
            f"depth={self.depth}",
            f"rdepth={self.rdepth}",
            f"err={self.err:.5E}",
            "igral={:.5E}".format(self.igral if hasattr(self, "igral") else np.inf),
        ]
        return " ".join(lst)


class IntegratorLearner(BaseLearner):
    def __init__(self, function, bounds, tol):
        """
        Parameters
        ----------
        function : callable: X → Y
            The function to learn.
        bounds : pair of reals
            The bounds of the interval on which to learn 'function'.
        tol : float
            Relative tolerance of the error to the integral; the learner
            is done when `tol > err / abs(igral)`.

        Attributes
        ----------
        approximating_intervals : set of intervals
            The intervals that can be used in the determination of the integral.
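In use, the learner is run until its `done()` criterion is met, after which `igral` holds the estimate; a sketch:

import numpy as np

import adaptive

learner = adaptive.IntegratorLearner(np.exp, bounds=(0, 1), tol=1e-8)
adaptive.runner.simple(learner, goal=lambda l: l.done())
print(learner.igral)  # should be close to e - 1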

Example from python-adaptive/adaptive: adaptive/learner/skopt_learner.py (view on GitHub)
import collections

import numpy as np
from skopt import Optimizer

from adaptive.learner.base_learner import BaseLearner
from adaptive.notebook_integration import ensure_holoviews
from adaptive.utils import cache_latest


class SKOptLearner(Optimizer, BaseLearner):
    """Learn a function minimum using ``skopt.Optimizer``.

    This is an ``Optimizer`` from ``scikit-optimize``,
    with the necessary methods added to make it conform
    to the ``adaptive`` learner interface.

    Parameters
    ----------
    function : callable
        The function to learn.
    **kwargs :
        Arguments to pass to ``skopt.Optimizer``.
    """

    def __init__(self, function, **kwargs):
        self.function = function
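A sketch of usage, assuming scikit-optimize is installed; whether the optimizer hands the point to `function` as a scalar or a length-1 sequence depends on the skopt search space, so the indexing below is an assumption:

import adaptive


def g(x):
    # Assumes x arrives as a length-1 sequence for a 1-D search space.
    return (x[0] - 0.3) ** 2


learner = adaptive.SKOptLearner(g, dimensions=[(-2.0, 2.0)])
adaptive.runner.simple(learner, goal=lambda l: l.npoints >= 20)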

Example from python-adaptive/adaptive: adaptive/learner/data_saver.py (view on GitHub)
    @copy_docstring_from(BaseLearner.tell)
    def tell(self, x, result):
        y = self.arg_picker(result)
        self.extra_data[x] = result
        self.learner.tell(x, y)
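`DataSaver` wraps another learner so that `function` can return a rich result (for example a dict) while the wrapped learner sees only the value picked out by `arg_picker`; a sketch with an illustrative function:

import operator

import adaptive


def f(x):
    # Return a dict; DataSaver stores it whole and forwards only "y".
    return {"y": x ** 2, "log": "anything else worth keeping"}


learner = adaptive.DataSaver(
    adaptive.Learner1D(f, bounds=(-1, 1)),
    arg_picker=operator.itemgetter("y"),
)
adaptive.runner.simple(learner, goal=lambda l: l.learner.loss() < 0.01)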

Example from python-adaptive/adaptive: adaptive/learner/new_learnerND.py (view on GitHub)
    def n_neighbors(self):
        return 1

    def __call__(self, domain, subdomain, codomain_bounds, data):
        dim = domain.ndim

        loss_input_volume = domain.volume(subdomain)
        triangle_loss = TriangleLoss()

        loss_curvature = triangle_loss(domain, subdomain, codomain_bounds, data)
        return (
            loss_curvature + self.exploration * loss_input_volume ** ((2 + dim) / dim)
        ) ** (1 / (2 + dim))


class LearnerND(BaseLearner):
    """Learns a function 'f: ℝ^N → ℝ^m'.

    Parameters
    ---------
    f : callable
        The function to learn. Must take a tuple of N real parameters and return a real
        number or an arraylike of length M.
    bounds : list of 2-tuples or `scipy.spatial.ConvexHull`
        A list ``[(a_1, b_1), (a_2, b_2), ..., (a_N, b_N)]`` describing a bounding box
        in N dimensions, or a convex hull that defines the boundary of the domain.
    loss : callable, optional
        An instance of a subclass of `LossFunction` that describes the loss
        of a subdomain.
    """

    def __init__(self, f, bounds, loss=None):
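Given the `n_neighbors` / `__call__(self, domain, subdomain, codomain_bounds, data)` interface visible in the snippet above, a custom loss might look roughly like the following sketch. The `LossFunction` import path, the use of `@property`, and the volume-only heuristic are all assumptions about this experimental module:

from adaptive.learner.new_learnerND import LearnerND, LossFunction


class VolumeLoss(LossFunction):
    """Illustrative loss: always refine the largest subdomain."""

    @property
    def n_neighbors(self):
        # No data from neighboring subdomains is needed.
        return 0

    def __call__(self, domain, subdomain, codomain_bounds, data):
        return domain.volume(subdomain)


learner = LearnerND(f=lambda xy: sum(xy), bounds=[(-1, 1), (-1, 1)], loss=VolumeLoss())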