How to use the numdifftools.Hessian function in numdifftools

To help you get started, we've selected a few numdifftools.Hessian examples based on popular ways it is used in public projects.

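Before the project snippets, here is a minimal self-contained sketch of the basic pattern (our own example, not taken from the projects below): nd.Hessian(f) returns a callable that estimates the matrix of second partial derivatives of a scalar function f by finite differences.

import numpy as np
import numdifftools as nd

def rosen(x):
    # Rosenbrock test function; smooth, with its minimum at (1, 1)
    return (1.0 - x[0])**2 + 100.0 * (x[1] - x[0]**2)**2

H = nd.Hessian(rosen)           # build the estimator once...
print(H(np.array([1.0, 1.0])))  # ...then evaluate it at any point
# approximately [[802., -400.], [-400., 200.]]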

github threeML / threeML / threeML / utils / differentiation.py (View on GitHub)
def get_hessian(function, point, minima, maxima):

    wrapper, scaled_deltas, scaled_point, orders_of_magnitude, n_dim = _get_wrapper(
        function, point, minima, maxima
    )

    # Compute the Hessian matrix at best_fit_values

    hessian_matrix_ = nd.Hessian(wrapper, scaled_deltas)(scaled_point)  # scaled_deltas sets the finite-difference step

    # Transform it to numpy matrix

    hessian_matrix = np.array(hessian_matrix_)

    # Now correct back the Hessian for the scales
    for i in range(n_dim):

        for j in range(n_dim):

            hessian_matrix[i, j] /= orders_of_magnitude[i] * orders_of_magnitude[j]

    return hessian_matrix
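The loop at the end undoes the parameter scaling: if the wrapper evaluates the target function at orders_of_magnitude * u, the chain rule gives Hessian_wrapper[i, j] = s[i] * s[j] * Hessian_f[i, j], so dividing each entry by orders_of_magnitude[i] * orders_of_magnitude[j] recovers the Hessian in the original units. A small self-contained check of that identity (our own sketch, not threeML code):

import numpy as np
import numdifftools as nd

f = lambda x: x[0]**2 * x[1] + np.exp(x[1])  # toy objective
s = np.array([1e2, 1e-1])                    # per-parameter scales
g = lambda u: f(s * u)                       # wrapper in scaled coordinates

x0 = np.array([3e2, 2e-1])
H_scaled = nd.Hessian(g)(x0 / s)
H_rescaled = H_scaled / np.outer(s, s)       # divide entry (i, j) by s[i] * s[j]
print(np.round(H_rescaled - nd.Hessian(f)(x0), 4))  # near-zero matrix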
github b45ch1 / algopy / documentation / sphinx / examples / wood_function_minimization.py (View on GitHub)
def show_local_curvature(f, g, h, x0):
    print('point:')
    print(x0)
    print('function value:')
    print(f(x0))
    print('autodiff gradient:')
    print(g(x0))
    print('finite differences gradient:')
    print(numdifftools.Gradient(f)(x0))
    print('autodiff hessian:')
    print(h(x0))
    print('finite differences hessian:')
    print(numdifftools.Hessian(f)(x0))
github b45ch1 / algopy / documentation / sphinx / examples / neg_binom_regression.py (View on GitHub)
    # starting point for the optimizer (the snippet begins mid-array; the
    # opening `theta0 = numpy.array([` is inferred from its use below)
    theta0 = numpy.array([
        0, 0, 0, 0,
        0.5,
        ], dtype=float)

    # do the max likelihood search
    results = scipy.optimize.fmin_ncg(
            f,
            theta0,
            fprime=g,
            fhess=h,
            avextol=1e-6,
            )

    # compute the hessian a couple of different ways
    algopy_hessian = h(results)
    num_hessian = numdifftools.Hessian(f)(results)

    # report the results of the search including aic and standard error
    print('search results:')
    print(results)
    print()
    print('aic:')
    print(get_aic(y, X, results))
    print()
    print('standard error using observed fisher information,')
    print('with hessian computed using algopy:')
    print(numpy.sqrt(numpy.diag(scipy.linalg.inv(algopy_hessian))))
    print()
    print('standard error using observed fisher information,')
    print('with hessian computed using numdifftools:')
    print(numpy.sqrt(numpy.diag(scipy.linalg.inv(num_hessian))))
    print()
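The last two blocks invert the Hessian of the negative log-likelihood at the optimum, i.e. the observed Fisher information, to get standard errors. Here is the same pattern in isolation on a toy normal model (our own sketch, not part of the algopy example):

import numpy as np
import scipy.linalg
import numdifftools as nd

rng = np.random.default_rng(0)
data = rng.normal(loc=2.0, scale=3.0, size=500)

def nll(theta):
    # negative log-likelihood of N(mu, sigma), with sigma log-parameterized
    mu, log_sigma = theta
    sigma = np.exp(log_sigma)
    return 0.5 * np.sum(((data - mu) / sigma)**2) + data.size * log_sigma

theta_hat = np.array([data.mean(), np.log(data.std())])  # closed-form MLE
hess = nd.Hessian(nll)(theta_hat)
se = np.sqrt(np.diag(scipy.linalg.inv(hess)))
print('standard errors:', se)  # se[0] should be close to 3 / sqrt(500)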
github b45ch1 / algopy / documentation / sphinx / examples / preferred_nucleotide.py (View on GitHub)
    # initialize the guess with the actual simulation parameter values
    y_guess = numpy.array([log_mu, d], dtype=float)
    result = scipy.optimize.fmin(
            eval_f_partial,
            y_guess,
            maxiter=10000,
            maxfun=10000,
            full_output=True,
            )
    print('fmin result:')
    print(result)
    print()
    y_opt = result[0]
    log_mu_opt, d_opt = y_opt[0], y_opt[1]
    mu_opt = numpy.exp(log_mu_opt)
    cov = scipy.linalg.inv(numdifftools.Hessian(eval_f_partial)(y_opt))
    print('maximum likelihood parameter estimates:')
    print('log mu:', log_mu_opt, end=' ')
    print('with standard deviation', numpy.sqrt(cov[0,0]))
    print('d:', d_opt, end=' ')
    print('with standard deviation', numpy.sqrt(cov[1,1]))
    print()
    print('gradient:')
    print(numdifftools.Gradient(eval_f_partial)(y_opt))
    print()
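The gradient printed at the end is a cheap convergence check: at a true local minimum it should be numerically close to zero. The same check in isolation (our own sketch):

import numpy as np
import scipy.optimize
import numdifftools as nd

f = lambda v: (v[0] - 1.0)**2 + np.exp(v[1]) - v[1]  # minimum at (1, 0)
v_opt = scipy.optimize.fmin(f, np.zeros(2), disp=False)
print(nd.Gradient(f)(v_opt))  # entries should be close to zero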
github dit / dit / dit / algorithms / optutil.py (View on GitHub)
    def build_gradient_hessian(self):
        import numdifftools

        self.gradient = numdifftools.Gradient(self.func)
        self.hessian = numdifftools.Hessian(self.func)
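Importing numdifftools inside the method keeps it an optional dependency, only required when derivatives are actually needed. A stripped-down version of the pattern (the Objective class below is a stand-in, not dit's class):

import numpy as np

class Objective:
    def __init__(self, func):
        self.func = func

    def build_gradient_hessian(self):
        # deferred import: numdifftools is only needed if this is called
        import numdifftools
        self.gradient = numdifftools.Gradient(self.func)
        self.hessian = numdifftools.Hessian(self.func)

obj = Objective(lambda x: np.sum(x**2))
obj.build_gradient_hessian()
print(obj.gradient(np.ones(3)))  # approximately [2., 2., 2.]
print(obj.hessian(np.ones(3)))   # approximately 2 * identity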
github b45ch1 / algopy / documentation / sphinx / examples / minimization / minhelper.py (View on GitHub)
def show_local_curvature(f, g, h, x0):
    print('point:')
    print(x0)
    print('function value:')
    print(f(x0))
    print('autodiff gradient:')
    print(g(x0))
    print('finite differences gradient:')
    print(numdifftools.Gradient(f)(x0))
    print('autodiff hessian:')
    print(h(x0))
    print('finite differences hessian:')
    print(numdifftools.Hessian(f)(x0))
github statsmodels / statsmodels / scikits / statsmodels / sandbox / regression / numdiff.py (View on GitHub)
    print(he[0] - 2*np.dot(x.T, x))

    for eps in [1e-3, 1e-4, 1e-5, 1e-6]:
        print('eps =', eps)
        print(approx_hess(xk, fun2, eps, args)[0] - 2*np.dot(x.T, x))

    hcs2 = approx_hess_cs2(xk, fun2, args=args)
    print('hcs2')
    print(hcs2 - 2*np.dot(x.T, x))

    hfd3 = approx_hess3(xk, fun2, args=args)
    print('hfd3')
    print(hfd3 - 2*np.dot(x.T, x))

    import numdifftools as nd
    hnd = nd.Hessian(lambda a: fun2(a, y, x))
    hessnd = hnd(xk)
    print('numdiff')
    print(hessnd - 2*np.dot(x.T, x))
    #assert_almost_equal(hessnd, he[0])
    gnd = nd.Gradient(lambda a: fun2(a, y, x))
    gradnd = gnd(xk)
'''
>>> hnd = nd.Hessian(lambda a: fun2(a, x))
>>> hnd(xk)
array([[ 216.87702746,   -3.41892545,    1.87887281],
       [  -3.41892545,  180.76379116,  -13.74326021],
       [   1.87887281,  -13.74326021,  198.5155617 ]])
>>> he
(array([[ 216.87702746,   -3.41892545,    1.87887281],
       [  -3.41892545,  180.76379116,  -13.74326021],
       [   1.87887281,  -13.74326021,  198.5155617 ]]), array([ 2.35204474,  1.92684939,  2.11920745]))
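Every estimate in this file is compared against 2*np.dot(x.T, x), which is the exact Hessian of a least-squares objective f(a) = sum((y - X a)**2). A self-contained version of that check (our own sketch; fun2 in the statsmodels file is assumed to be such an objective, consistent with the comparisons above):

import numpy as np
import numdifftools as nd

rng = np.random.default_rng(42)
X = rng.normal(size=(50, 3))
y = X @ np.array([1.0, -2.0, 0.5]) + rng.normal(size=50)

f = lambda a: np.sum((y - X @ a)**2)    # least-squares objective
xk = np.zeros(3)                        # any point works: f is quadratic
hess = nd.Hessian(f)(xk)
print(np.round(hess - 2 * X.T @ X, 4))  # close to the zero matrix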
github b45ch1 / algopy / documentation / sphinx / speed_comparison / use_numdifftools.py (View on GitHub)
    def __init__(self, f, x, test='f'):
        self.f = f
        self.x = x

        self.g = nd.Gradient(f)

        self.H = nd.Hessian(f)
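The enclosing class name is cut off in this snippet; with a stand-in name, the wrapper bundles a function with numdifftools estimators of its derivatives (our own usage sketch):

import numpy as np
import numdifftools as nd

class NumdifftoolsBench:  # stand-in for the truncated class name
    def __init__(self, f, x, test='f'):
        self.f = f
        self.x = x
        self.g = nd.Gradient(f)
        self.H = nd.Hessian(f)

bench = NumdifftoolsBench(lambda x: np.sum(np.cos(x)), np.zeros(4))
print(bench.g(bench.x))  # gradient of sum(cos) at 0: the zero vector
print(bench.H(bench.x))  # Hessian at 0: approximately -identity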