How to use the numdifftools.nd_algopy module in numdifftools

To help you get started, we’ve selected a few numdifftools.nd_algopy examples based on popular ways the module is used in public projects.

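Before diving into the examples, here is a minimal sketch of the interface (ours, not taken from the projects below): numdifftools.nd_algopy mirrors the numdifftools classes (Derivative, Gradient, Jacobian, Hessdiag, Hessian) but computes derivatives with algopy's algorithmic differentiation, selected via method='forward' or method='reverse'.

import numpy as np
import numdifftools.nd_algopy as nd

# First derivative of exp at x=0 via forward-mode algorithmic differentiation.
fd = nd.Derivative(np.exp, method='forward')
print(fd(0.0))  # ~1.0

# Gradient of a scalar function of a vector argument, via reverse mode.
dfun = nd.Gradient(lambda x: np.sum(x ** 2), method='reverse')
print(dfun([1.0, 2.0, 3.0]))  # ~[2. 4. 6.]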

github pbrod / numdifftools / tests / test_nd_algopy.py
# Imports added for context; in the repository this is a test-class method.
import numdifftools.nd_algopy as nd
from numpy.testing import assert_array_almost_equal

def test_fun_with_additional_parameters(self):
    '''Test for issue #9: extra args/kwargs are forwarded to the function.'''
    def func(x, a, b=1):
        return b * a * x * x * x
    methods = ['reverse', 'forward']
    dfuns = [nd.Jacobian, nd.Derivative, nd.Gradient, nd.Hessdiag,
             nd.Hessian]
    for dfun in dfuns:
        for method in methods:
            df = dfun(func, method=method)
            val = df(0.0, 1.0, b=2)
            assert_array_almost_equal(val, 0)
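The point of this test is that extra positional and keyword arguments given to the derivative object are forwarded to the wrapped function, so parameters like a and b need not be captured in a closure. A standalone sketch of the same pattern (our example, using the API exercised by the test):

import numdifftools.nd_algopy as nd

def func(x, a, b=1):
    return b * a * x ** 3

df = nd.Derivative(func, method='forward')
# a=1.0 and b=2 are passed through to func; d/dx (2*x**3) = 6*x**2, which is 0 at x=0.
print(df(0.0, 1.0, b=2))  # ~0.0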
github pbrod / numdifftools / tests / test_nd_algopy.py
# Imports added for context; in the repository this is a test-class method.
import numpy as np
import numdifftools.nd_algopy as nd
from numpy.testing import assert_array_almost_equal

def test_on_vector_valued_function(self):
    xdata = np.reshape(np.arange(0, 1, 0.1), (-1, 1))
    ydata = 1 + 2 * np.exp(0.75 * xdata)

    def fun(c):
        # Squared residuals of the model c[0] + c[1]*exp(c[2]*x) against ydata.
        return (c[0] + c[1] * np.exp(c[2] * xdata) - ydata) ** 2

    for method in ['reverse']:  # TODO: 'forward' fails

        Jfun = nd.Jacobian(fun, method=method)
        J = Jfun([1, 2, 0.75])  # numerically zero at the true parameters
        assert_array_almost_equal(J, np.zeros(J.shape))
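The Jacobian is numerically zero here because fun returns the squared residuals r**2 of a model fitted at its true parameters c = [1, 2, 0.75]: the derivative 2*r*(dr/dc) vanishes wherever the residual r itself is zero. As a cross-check of our own (not part of the test), the reverse-mode result can be compared against numdifftools' default finite-difference Jacobian:

import numpy as np
import numdifftools as ndft          # finite differences
import numdifftools.nd_algopy as nd  # algorithmic differentiation

xdata = np.reshape(np.arange(0, 1, 0.1), (-1, 1))
ydata = 1 + 2 * np.exp(0.75 * xdata)

def fun(c):
    return (c[0] + c[1] * np.exp(c[2] * xdata) - ydata) ** 2

J_ad = nd.Jacobian(fun, method='reverse')([1, 2, 0.75])
J_fd = ndft.Jacobian(fun)([1, 2, 0.75])
print(np.allclose(J_ad, J_fd, atol=1e-6))  # True: both are ~0 here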
github pbrod / numdifftools / numdifftools / speed_comparison / run_benchmarks.py
# GRADIENT COMPUTATION
# --------------------
# (time, np, algopy, scientific, benchmark1 and the `method` index dict
#  are defined earlier in the file.)
gradient_N_list = [2, 4, 8, 16, 32, 64, 96]
# gradient_N_list = [20]

results_gradient_list = []
for N in gradient_N_list:
    print('N=', N)
    results_gradient = np.zeros((4, 3))
    # algopy, UTPS variant
    f = benchmark1.F(N)
    f0 = f(3 * np.ones(N))  # evaluate once up front (value not used below)
    t = time.time()
    gradient = algopy.Gradient(f, method='forward')
    preproc_time = time.time() - t
    t = time.time()
    ref_g = gradient(3 * np.ones(N))
    run_time = time.time() - t
    # NOTE: stored in the 'algopy_reverse' slot although forward mode is timed above.
    results_gradient[method['algopy_reverse']] = run_time, 0.0, preproc_time

    # scientific
    f = benchmark1.F(N)
    t = time.time()
    gradient = scientific.Gradient(f)
    preproc_time = time.time() - t
    t = time.time()
    g = gradient(3 * np.ones(N))
    run_time = time.time() - t
    results_gradient[method['scientific']] = run_time, np.linalg.norm(
        g - ref_g) / np.linalg.norm(ref_g), preproc_time
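The benchmark separates the cost of constructing a differentiator (preproc_time) from the cost of one gradient evaluation (run_time), and scores each method by its relative error against the algopy reference gradient ref_g. A minimal Python 3 sketch of the same timing pattern, using only numpy and numdifftools.nd_algopy (benchmark1, scientific and the algopy wrapper above are helpers local to the numdifftools repository):

import time
import numpy as np
import numdifftools.nd_algopy as nd

def f(x):
    # Simple scalar test function of an N-dimensional argument.
    return np.sum(x ** 2)

x0 = 3 * np.ones(16)

t = time.time()
gradient = nd.Gradient(f, method='forward')
preproc_time = time.time() - t   # time to build the differentiator

t = time.time()
g = gradient(x0)
run_time = time.time() - t       # time for one gradient evaluation

print(preproc_time, run_time, g[:3])  # gradient of sum(x**2) is 2*x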