How to use the numdifftools.Gradient function in numdifftools

To help you get started, we’ve selected a few numdifftools examples, based on popular ways it is used in public projects.
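Before the project examples below, here is a minimal, self-contained sketch of the basic call pattern: pass a scalar-valued function to numdifftools.Gradient, then call the resulting object at a point to get the numerically estimated gradient. The test function and evaluation point are our own illustrative choices.

import numpy as np
import numdifftools as nd

# Scalar-valued function of a vector argument (the Rosenbrock function, chosen for illustration).
def rosen(x):
    return (1.0 - x[0])**2 + 100.0 * (x[1] - x[0]**2)**2

grad_fun = nd.Gradient(rosen)          # build the differentiator once
print(grad_fun(np.array([1.0, 1.0])))  # ~[0, 0] at the minimum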

github popgenmethods / momi2 / test / test_autograd.py View on Github
# Imports the snippet relies on: autograd supplies grad and the numpy wrapper,
# numdifftools supplies the numerical reference gradient.
import autograd.numpy as np
from autograd import grad
from numdifftools import Gradient


def check_gradient(f, x):
    print(x, "\n", f(x))

    print("# grad2")
    grad2 = Gradient(f)(x)          # numerical gradient (numdifftools)
    print("# building grad1")
    g = grad(f)                     # automatic differentiation (autograd)
    print("# computing grad1")
    grad1 = g(x)

    print("gradient1\n", grad1, "\ngradient2\n", grad2)
    assert np.allclose(grad1, grad2)

    # check Hessian-vector product
    y = np.random.normal(size=x.shape)
    gdot = lambda u: np.dot(g(u), y)
    hess1, hess2 = grad(gdot)(x), Gradient(gdot)(x)
    print("hess1\n", hess1, "\nhess2\n", hess2)
    assert np.allclose(hess1, hess2)
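If you want to try check_gradient directly, here is a hedged usage sketch of our own (the test function and point are arbitrary); the function must be scalar-valued and built from autograd's numpy wrapper so that grad can differentiate it:

import autograd.numpy as np  # autograd's numpy wrapper, as in the imports above

check_gradient(lambda x: np.sum(np.sin(x) * x), np.array([0.5, 1.5, 2.5]))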
github oxfordcontrol / Bayesian-Optimization / tests / test_derivatives.py View on Github
import numdifftools as nd


def derivatives_numerical(x, model):
    '''
    Returns the gradient and Hessian of the optimal value of
    the SDP with respect to x.
    Note that the Hessian is based on the analytical gradient,
    for accuracy and performance reasons.
    '''
    def opt_val(y):
        return model.acquisition(y)[0]

    def gradient(y):
        return model.acquisition(y)[1]

    gradient_numerical = nd.Gradient(opt_val)(x)
    # Hessian from the analytical gradient (as the docstring states),
    # computed as its numerical Jacobian.
    hessian_numerical = nd.Jacobian(gradient)(x)

    return gradient_numerical, hessian_numerical
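The same idea in isolation, as a minimal sketch of our own (the quadratic objective and its hand-written gradient are illustrative, not from the project): when an analytical gradient is available, numdifftools can form the Hessian as the numerical Jacobian of that gradient, which is usually more accurate than differentiating the objective twice.

import numpy as np
import numdifftools as nd

A = np.array([[3.0, 1.0], [1.0, 2.0]])

def objective(x):
    return 0.5 * x @ A @ x            # scalar objective with Hessian A

def analytic_grad(x):
    return A @ x                      # exact gradient of the objective

x0 = np.array([1.0, -1.0])
grad_num = nd.Gradient(objective)(x0)       # numerical gradient of the objective
hess_num = nd.Jacobian(analytic_grad)(x0)   # Hessian via the Jacobian of the gradient
print(np.allclose(grad_num, analytic_grad(x0)), np.allclose(hess_num, A))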
github pymc-devs / pymc3 / tests / test_distributions.py View on Github
def check_dlogp(model, value, domains):
    # DictToArrayBijection, ArrayOrdering and Point come from pymc3's internals,
    # Gradient from numdifftools, and its is itertools in the original test module.
    if not model.cont_vars:
        return

    domains = [d[1:-1] for d in domains]
    bij = DictToArrayBijection(ArrayOrdering(model.cont_vars), model.test_point)

    # Analytic gradient of the model log-probability, mapped onto a flat parameter array.
    dlogp = bij.mapf(model.dlogpc())

    # Log-probability over the same flat array, and its numerical gradient for comparison.
    logp = bij.mapf(model.logpc)
    ndlogp = Gradient(logp)

    for a in its.product(*domains):
        pt = Point(dict((str(var), val) for var, val in zip(model.vars, a)), model=model)
        pt = bij.map(pt)
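The pattern being exercised here, reduced to a standalone sketch of our own (a standard-normal log-density instead of a PyMC model): compare a hand-derived gradient of a log-probability against numdifftools.Gradient over a grid of points.

import itertools as its
import numpy as np
import numdifftools as nd

def logp(x):
    return -0.5 * np.sum(x**2)   # standard-normal log-density, up to a constant

def dlogp(x):
    return -x                    # its analytic gradient

ndlogp = nd.Gradient(logp)
for a in its.product([-1.0, 0.0, 1.0], repeat=2):
    pt = np.array(a)
    assert np.allclose(dlogp(pt), ndlogp(pt), atol=1e-6)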
github bamos / densenet.pytorch / attic / numcheck-grads.py View on Github
    # ... tail of the numpy loss function (used below as f_loss) from the original file
    mse = np.mean(mse_batch)
    return mse

def unpack(W12_flat):
    W1, W2 = np.split(W12_flat, [nFeatures*nHidden1])
    W1 = W1.reshape(nHidden1, nFeatures)
    W2 = W2.reshape(nHidden2, nFeatures+nHidden1)
    return W1, W2

W12_flat = torch.cat((W1.data.view(-1), W2.data.view(-1))).cpu().numpy()
print('The PyTorch loss is {:.3f}. f_loss for numeric diff is {:.2f}.'.format(
    loss.data[0], f_loss(W12_flat)))

assert(np.abs(loss.data[0] - f_loss(W12_flat)) < 1e-4)

g = nd.Gradient(f_loss)
dW12_flat = g(W12_flat)
dW1, dW2 = unpack(dW12_flat)

def printGrads(tag, W, dW):
    print('\n' + '-'*40 + '''
The gradient w.r.t. {0} from PyTorch is:

{1}

The gradient w.r.t. {0} from numeric differentiation is:

{2}'''.format(tag, W.grad, dW))


printGrads('W1', W1, dW1)
printGrads('W2', W2, dW2)
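The essential trick in this snippet, shown as a small self-contained sketch of our own (plain NumPy, no PyTorch): numdifftools.Gradient differentiates a scalar function of a single 1-D argument, so several parameter matrices are flattened into one vector, differentiated, and unpacked back into their original shapes.

import numpy as np
import numdifftools as nd

rng = np.random.default_rng(0)
X, y = rng.normal(size=(8, 3)), rng.normal(size=8)
W1, W2 = rng.normal(size=(4, 3)), rng.normal(size=(1, 4))

def unpack(w_flat):
    W1f, W2f = np.split(w_flat, [W1.size])
    return W1f.reshape(W1.shape), W2f.reshape(W2.shape)

def loss(w_flat):
    A, B = unpack(w_flat)
    pred = np.tanh(X @ A.T) @ B.T          # tiny two-layer model
    return np.mean((pred.ravel() - y) ** 2)

w_flat = np.concatenate([W1.ravel(), W2.ravel()])
dW1, dW2 = unpack(nd.Gradient(loss)(w_flat))   # gradients back in the original shapes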
github dit / dit / dit / algorithms / optutil.py View on Github
def build_gradient_hessian(self):
        import numdifftools

        self.gradient = numdifftools.Gradient(self.func)
        self.hessian = numdifftools.Hessian(self.func)
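A minimal sketch of the same caching idea with an illustrative class of our own (not dit's code): construct the numdifftools.Gradient and numdifftools.Hessian objects once and reuse them for repeated evaluations, for example inside an optimizer loop.

import numpy as np
import numdifftools as nd

class SmoothObjective:
    def __init__(self, func):
        self.func = func
        self.gradient = nd.Gradient(func)   # built once, callable many times
        self.hessian = nd.Hessian(func)

obj = SmoothObjective(lambda x: np.sum(np.exp(x) - x))
x0 = np.zeros(3)
print(obj.gradient(x0))   # ~[0, 0, 0] at the minimizer x = 0
print(obj.hessian(x0))    # ~identity matrix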
github b45ch1 / algopy / documentation / sphinx / examples / himmelblau_minimization.py View on Github
import numdifftools


def show_local_curvature(f, g, h, x0):
    print('point:')
    print(x0)
    print('function value:')
    print(f(x0))
    print('autodiff gradient:')
    print(g(x0))
    print('finite differences gradient:')
    print(numdifftools.Gradient(f)(x0))
    print('autodiff hessian:')
    print(h(x0))
    print('finite differences hessian:')
    print(numdifftools.Hessian(f)(x0))
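For context, a runnable sketch of our own using Himmelblau's function itself, which the example above is built around: at the known minimum (3, 2) the finite-difference gradient should be essentially zero and the Hessian positive definite.

import numpy as np
import numdifftools

def himmelblau(x):
    return (x[0]**2 + x[1] - 11.0)**2 + (x[0] + x[1]**2 - 7.0)**2

x0 = np.array([3.0, 2.0])                      # one of Himmelblau's minima
print(numdifftools.Gradient(himmelblau)(x0))   # ~[0, 0]
print(numdifftools.Hessian(himmelblau)(x0))    # positive definite at a minimum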
github rsnemmen / nmmn / nmmn / stats.py View on Github
corresponding to the [input] x array.

References:
1. http://www.graphpad.com/faq/viewfaq.cfm?faq=1099, "How does Prism compute confidence and prediction bands for nonlinear regression?"
2. http://stats.stackexchange.com/questions/15423/how-to-compute-prediction-bands-for-non-linear-regression
3. See also my notebook.
	"""
	import numdifftools

	alpha = 1. - conf	# significance
	n = xd.size		# data sample size

	if x is None: x = numpy.linspace(xd.min(), xd.max(), 100)

	# Gradient of the model with respect to [x, par1, par2, ...]; evaluated point by point below
	dfun = numdifftools.Gradient(fun)

	# Quantile of Student's t distribution for p=1-alpha/2
	q = scipy.stats.t.ppf(1. - alpha/2., n - 2)

	# Residual sum of squares
	rss = residual(yd, evalfun(fun, xd, par))

	grad, p = [], []
	y = evalfun(fun, x, par)
	v = numpy.zeros_like(x)

	for i in range(x.size):
		# List: arrays consisting of [x[i], par1, par2, ...]
		p.append(numpy.concatenate(([x[i]], par)))
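The excerpt stops inside the loop, which presumably goes on to evaluate dfun at those concatenated points. The underlying idea is the standard gradient-based (delta-method) approach to confidence bands described in the references above: propagate the fitted parameters' covariance through the model with a numerically evaluated gradient. Here is a minimal, self-contained sketch of that idea with an illustrative model and a hypothetical parameter covariance covar; none of this is nmmn's code.

import numpy as np
import scipy.stats
import numdifftools

def model(x, par):
    a, b = par
    return a * np.exp(b * x)   # illustrative nonlinear model

def band_halfwidth(xi, par, covar, q):
    # q * sqrt(g^T C g), with g the gradient of the model w.r.t. the parameters at xi
    g = numdifftools.Gradient(lambda p: model(xi, p))(par)
    return q * np.sqrt(g @ covar @ g)

par = np.array([2.0, -0.5])
covar = np.array([[0.04, 0.0], [0.0, 0.01]])   # hypothetical covariance from a fit
q = scipy.stats.t.ppf(0.975, 20)               # 95% band, 20 residual degrees of freedom
print(band_halfwidth(1.0, par, covar, q))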