How to use the gpflow.kernels.Matern32 kernel in gpflow

To help you get started, we’ve selected a few gpflow.kernels.Matern32 examples, based on popular ways it is used in public projects.
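For orientation before the project snippets below, here is a minimal sketch of building a Matern32 kernel and fitting a GPR model with it. It assumes the GPflow 2 API, where kernels no longer take an input_dim argument; several snippets below use the older GPflow 1 style such as Matern32(1).

import numpy as np
import gpflow

# toy 1-D regression data
X = np.random.rand(20, 1)
Y = np.sin(10 * X) + 0.1 * np.random.randn(20, 1)

# Matern-3/2 kernel: variance and lengthscales are trainable hyperparameters
kernel = gpflow.kernels.Matern32(variance=1.0, lengthscales=0.5)
model = gpflow.models.GPR(data=(X, Y), kernel=kernel)

# fit the hyperparameters by maximising the log marginal likelihood
gpflow.optimizers.Scipy().minimize(model.training_loss, model.trainable_variables)

# posterior mean and variance at new inputs
mean, var = model.predict_f(np.linspace(0, 1, 100)[:, None])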


github GPflow / GPflow / tests / test_scaled_euclid_dist.py
import numpy as np
import pytest
import tensorflow as tf

import gpflow.kernels as kernels

rng = np.random.RandomState(0)


class Datum:
    num_data = 100
    D = 100
    X = rng.rand(num_data, D) * 100


kernel_list = [kernels.Matern12(), kernels.Matern32(), kernels.Matern52(),
               kernels.Exponential(), kernels.Cosine()]


@pytest.mark.parametrize('kernel', kernel_list)
def test_kernel_euclidean_distance(kernel):
    '''
    Tests the output & gradients of kernels that are a function of the (scaled) Euclidean
    distance between points. We test on a high-dimensional space, which can produce very
    small distances and cause scaled_square_dist to return slightly negative values.
    '''
    K = kernel(Datum.X)
    assert not np.isnan(K).any(), 'NaNs in the output of the ' + type(kernel).__name__ + ' kernel.'
    assert np.isfinite(K).all(), 'Infs in the output of the ' + type(kernel).__name__ + ' kernel.'

    X_as_param = tf.Variable(Datum.X)
    with tf.GradientTape() as tape:
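        # (The snippet is truncated here on the source page. A plausible continuation,
        #  matching the docstring above, evaluates the kernel on the variable inside the
        #  tape and checks its gradients -- a hedged sketch, not the original test code.)
        K = kernel(X_as_param)
    dK_dX = tape.gradient(K, X_as_param)
    assert not np.isnan(dK_dX.numpy()).any(), \
        'NaNs in the gradients of the ' + type(kernel).__name__ + ' kernel.'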
github GPflow / GPflow / tests / test_hmc.py
def model(self):
        X, Y = np.random.randn(2, 10, 1)
        return gpflow.models.GPMC(
            X, Y,
            kern=gpflow.kernels.Matern32(1),
            likelihood=gpflow.likelihoods.StudentT())
github GPflow / GPflow / tests / test_methods.py
def prepare(self):
        rng = np.random.RandomState(0)
        X = rng.randn(100, 2)
        Y = rng.randn(100, 1)
        Z = rng.randn(10, 2)
        lik = gpflow.likelihoods.Gaussian()
        kern = gpflow.kernels.Matern32(2)
        Xs = rng.randn(10, 2)

        # make one of each model
        ms = []
        #for M in (gpflow.models.GPMC, gpflow.models.VGP):
        for M in (gpflow.models.VGP, gpflow.models.GPMC):
            ms.append(M(X, Y, kern, lik))
        for M in (gpflow.models.SGPMC, gpflow.models.SVGP):
            ms.append(M(X, Y, kern, lik, Z))
        ms.append(gpflow.models.GPR(X, Y, kern))
        ms.append(gpflow.models.SGPR(X, Y, kern, Z=Z))
        ms.append(gpflow.models.GPRFITC(X, Y, kern, Z=Z))
        return ms, Xs, rng
github GPflow / GPflow / unsorted_tests / test_predict.py
def kernel(cls):
        return gpflow.kernels.Matern32(cls.input_dim)
github GPflow / GPflow / tests / test_prior.py
def get_gpmc_model_params():
    kernel = gpflow.kernels.Matern32()
    likelihood = gpflow.likelihoods.Gaussian()
    data = [np.arange(5), np.arange(5)]
    return data, kernel, likelihood
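Since test_prior.py is about priors, it is worth noting how a prior would typically be attached to the Matern32 hyperparameters before building a GPMC model. This is a hedged sketch assuming the GPflow 2 API, where each parameter exposes a .prior attribute accepting a TensorFlow Probability distribution; the Gamma shape/rate values are arbitrary placeholders.

import tensorflow_probability as tfp
import gpflow
from gpflow.utilities import to_default_float

kernel = gpflow.kernels.Matern32()
# Gamma priors on the kernel hyperparameters (values chosen arbitrarily)
kernel.lengthscales.prior = tfp.distributions.Gamma(to_default_float(2.0), to_default_float(2.0))
kernel.variance.prior = tfp.distributions.Gamma(to_default_float(2.0), to_default_float(2.0))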
github jameshensman / VFF / experiments / airline / airline_additive_figure.py
def plot(m):
    fig, axes = plt.subplots(2, 4, figsize=(16, 5))
    Xtest = np.linspace(0, 1, 200)[:, None]
    mu, var = m.predict_components(Xtest)
    for i in range(mu.shape[1]):
        ax = axes.flatten()[i]
        Xplot = Xtest * (Xmax[i] - Xmin[i]) + Xmin[i]
        ax.plot(Xplot, mu[:, i], lw=2, color='C0')
        ax.plot(Xplot, mu[:, i] + 2*np.sqrt(var[:, i]), 'C0--', lw=1)
        ax.plot(Xplot, mu[:, i] - 2*np.sqrt(var[:, i]), 'C0--', lw=1)
        ax.set_title(names[i])


if __name__ == '__main__':
    m = vff.gpr.GPR_additive(X, Y, np.arange(30), np.zeros(X.shape[1]) - 2, np.ones(X.shape[1]) + 2,
                             [gpflow.kernels.Matern32(1) for i in range(X.shape[1])])
    opt = gpflow.train.ScipyOptimizer()
    opt.minimize(m)

    plot(m)

    plt.show()
github jameshensman / VFF / VFF / sfgpmc_kronecker.py
cmap=plt.cm.viridis, linewidth=0.2)
        mu, var = m.predict_f(Xtest)
        ax1.contour(xtest, ytest, mu.reshape(100, 100),
                    cmap=plt.cm.viridis, linewidths=6,
                    vmin=0, vmax=2.7)
        ax2.contour(xtest, ytest, var.reshape(100, 100),
                    cmap=plt.cm.viridis, linewidths=6,
                    vmin=0, vmax=0.5)

        ax1.set_xlim(-1.5, 1.5)
        ax1.set_ylim(-1.5, 1.5)
        ax2.set_xlim(-1.5, 1.5)
        ax2.set_ylim(-1.5, 1.5)

    lik = gpflow.likelihoods.Exponential
    for k in [gpflow.kernels.Matern32]:

        a = X.min(0) - 1.5
        b = X.max(0) + 1.5

        Ms = np.arange(10)

        m = SFGPMC_kron(X, Y, Ms, a=a, b=b, kerns=[k(1), k(1)], likelihood=lik())
        m0 = gpflow.gpmc.GPMC(X, Y, kern=k(1, active_dims=[0]) * k(1, active_dims=[1]), likelihood=lik())
        # m.kern.matern32_1

        # fix the kernels
        for k in m.kerns:
            k.lengthscales.fixed = True
            k.variance.fixed = True
        m0.kern.matern32_1.variance.fixed = True
        m0.kern.matern32_1.lengthscales.fixed = True
github oxfordcontrol / Bayesian-Optimization / run.py
import random
import argparse
import gpflow
import tensorflow as tf  # needed below for seeding and the SafeMatern32 clamp
from methods.oei import OEI
from methods.random import Random
import time
import pickle
from benchmark_functions import scale_function, hart6
import copy

algorithms = {
    'OEI': OEI,
    'Random': Random
}

class SafeMatern32(gpflow.kernels.Matern32):
    # See https://github.com/GPflow/GPflow/pull/727
    def euclid_dist(self, X, X2):
        r2 = self.square_dist(X, X2)
        return tf.sqrt(tf.maximum(r2, 1e-40))
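# (Comment added for this page, not in the original file: tf.sqrt has an infinite
#  gradient at zero, so when square_dist returns 0 -- or a tiny negative value from
#  floating-point cancellation -- the Matern32 gradients become NaN. Clamping r2 at a
#  small positive floor before the sqrt, as above, keeps both the kernel values and
#  their gradients finite; SafeMatern32 then drops in wherever gpflow.kernels.Matern32
#  would otherwise be used.)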


def run(options, seed, robust=False, save=False):
    '''
    Runs Bayesian optimization on the setup defined in the options dictionary,
    starting from a predefined seed. Results are saved in the 'out' folder and
    logs in the 'log' folder.
    '''
    options['seed'] = seed
    # Set the random seeds for NumPy, TensorFlow, and Python
    tf.reset_default_graph()
    tf.set_random_seed(seed)
github jameshensman / VFF / experiments / increasing_dim / Exp_1 / kron.py
def prodkern(dim):
    return gpflow.kernels.Prod([gpflow.kernels.Matern32(1, active_dims=[i], lengthscales=lengthscale)
                                for i in range(dim)])
k = prodkern(dim)
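For comparison, the same product-of-Matern32 construction under the GPflow 2 API (combination via gpflow.kernels.Product, with kernels taking active_dims and lengthscales keyword arguments rather than input_dim) would look roughly like the sketch below; the lengthscale value is an arbitrary placeholder.

import gpflow

def prodkern(dim, lengthscale=0.3):
    # product of one-dimensional Matern-3/2 kernels, one per input dimension
    return gpflow.kernels.Product(
        [gpflow.kernels.Matern32(active_dims=[i], lengthscales=lengthscale)
         for i in range(dim)])

k = prodkern(4)  # kernel over a 4-dimensional input space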