How to use the numdifftools.core.MinStepGenerator class in numdifftools

To help you get started, we’ve selected a few numdifftools examples based on popular ways it is used in public projects.

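Before the selected examples, here is a minimal usage sketch (the parameter values and the np.exp test function are illustrative assumptions, not taken from the projects below): a MinStepGenerator instance yields a decreasing sequence of step sizes when called with the evaluation point, and it can be passed as the step argument of differentiation classes such as nd.Derivative.

import numpy as np
import numdifftools as nd

# Build a generator of 10 step sizes, each a factor `step_ratio` smaller
# than the previous one (values chosen for illustration only).
step_gen = nd.MinStepGenerator(base_step=None, num_steps=10,
                               step_ratio=4, offset=-1)

# Calling the generator with the evaluation point yields the step sequence.
steps = [h for h in step_gen(0)]

# Pass the generator as the `step` argument of a differentiation object.
dexp = nd.Derivative(np.exp, step=step_gen, method='central')
print(dexp(1.0))  # should be close to np.exp(1.0)

The tests below use the same pattern with nd.Derivative, nd.Hessian and nd.Hessdiag.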

github pbrod / numdifftools / tests / test_numdifftools.py
def test_default_generator(self):
        step_gen = nd.MinStepGenerator(base_step=None, num_steps=10,
                                       step_ratio=4, offset=-1)
        h = np.array([h for h in step_gen(0)])
        desired = np.array([3.58968236e-02, 8.97420590e-03, 2.24355147e-03,
                            5.60887869e-04, 1.40221967e-04, 3.50554918e-05,
                            8.76387295e-06, 2.19096824e-06, 5.47742059e-07,
                            1.36935515e-07])

        assert_array_almost_equal((h - desired) / desired, 0)

github pbrod / numdifftools / tests / test_numdifftools.py
def test_complex(self):
        def fun(x):
            return x[0] + x[1] ** 2 + x[2] ** 3
        htrue = np.array([0., 2., 18.])
        method = 'complex'
        for num_steps in range(3, 7, 1):
            steps = nd.MinStepGenerator(num_steps=num_steps,
                                        use_exact_steps=True,
                                        step_ratio=2.0, offset=4)
            Hfun = nd.Hessdiag(fun, step=steps, method=method,
                               full_output=True)
            hd, _info = Hfun([1, 2, 3])
            _error = hd - htrue
            assert_array_almost_equal(hd, htrue)

github pbrod / numdifftools / tests / test_numdifftools.py
def test_hessian_cosIx_yI_at_I0_0I(self):
        # cos(x-y), at (0,0)

        def fun(xy):
            return np.cos(xy[0] - xy[1])
        htrue = [[-1., 1.], [1., -1.]]
        methods = ['multicomplex', 'complex', 'central', 'central2', 'forward',
                   'backward']
        for num_steps in [10, 1]:
            step = nd.MinStepGenerator(num_steps=num_steps)
            for method in methods:
                Hfun2 = nd.Hessian(fun, method=method, step=step,
                                   full_output=True)
                h2, _info = Hfun2([0, 0])
                # print(method, (h2-np.array(htrue)))
                assert_array_almost_equal(h2, htrue)

github pbrod / numdifftools / tests / test_numdifftools.py
def test_central_and_forward_derivative_on_log(self):
        # Although a central rule may put some samples in the wrong places, it
        # may still succeed
        epsilon = nd.MinStepGenerator(num_steps=15, offset=0, step_ratio=2)
        dlog = nd.Derivative(np.log, method='central', step=epsilon)
        x = 0.001
        self.assertAlmostEqual(dlog(x), 1.0 / x)

        # But forcing the use of a one-sided rule may be smart anyway
        dlog = nd.Derivative(np.log, method='forward', step=epsilon)
        self.assertAlmostEqual(dlog(x), 1 / x)

github pbrod / numdifftools / tests / test_numdifftools.py
def test_fixed_base_step(self):
        desired = 0.1
        step_gen = nd.MinStepGenerator(base_step=desired, num_steps=1, scale=2,
                                       offset=0)
        h = [h for h in step_gen(0)]
        assert_array_almost_equal((h[0] - desired) / desired, 0)

github pbrod / numdifftools / tests / test_numdifftools.py
def test_default_base_step(self):
        step_gen = nd.MinStepGenerator(num_steps=1, offset=0)
        h = [h for h in step_gen(0)]
        desired = nd.EPS ** (1. / 2.5)
        assert_array_almost_equal((h[0] - desired) / desired, 0)

github pbrod / numdifftools / tests / test_numdifftools.py
def test_fixed_step(self):
        def fun(x):
            return x[0] + x[1] ** 2 + x[2] ** 3
        htrue = np.array([0., 2., 18.])

        methods = ['multicomplex', 'complex', 'central', 'forward', 'backward']
        for order in range(2, 7, 2):
            steps = nd.MinStepGenerator(num_steps=order + 1,
                                        use_exact_steps=True,
                                        step_ratio=3., offset=0)
            for method in methods:
                Hfun = nd.Hessdiag(fun, step=steps, method=method, order=order,
                                   full_output=True)
                hd, _info = Hfun([1, 2, 3])
                _error = hd - htrue
                assert_array_almost_equal(hd, htrue)

github pbrod / numdifftools / src / numdifftools / run_benchmark.py
def main(problem_sizes=(4, 8, 16, 32, 64, 96)):
    fixed_step = MinStepGenerator(num_steps=1, use_exact_steps=True, offset=0)
    epsilon = MaxStepGenerator(num_steps=14, use_exact_steps=True,
                               step_ratio=1.6, offset=0)
    adaptiv_txt = '_adaptive_{0:d}_{1!s}_{2:d}'.format(epsilon.num_steps,
                                                       str(epsilon.step_ratio),
                                                       epsilon.offset)
    gradient_funs = OrderedDict()
    hessian_funs = OrderedDict()

    # hessian_fun = 'Hessdiag'
    hessian_fun = 'Hessian'

    if nda is not None:
        nda_method = 'forward'
        nda_txt = 'algopy_' + nda_method
        gradient_funs[nda_txt] = nda.Jacobian(1, method=nda_method)