How to use the gpflow.kernels.Matern52 class in gpflow

To help you get started, we’ve selected a few gpflow.kernels.Matern52 examples based on popular ways it is used in public projects.
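Before the project snippets, here is a minimal sketch of constructing the kernel and dropping it into a regression model. It assumes the GPflow 1.x API that the snippets below use; the data arrays are made up purely for illustration.

import numpy as np
import gpflow

# Matern-5/2 kernel over a 1-D input; variance defaults to 1.0
k = gpflow.kernels.Matern52(input_dim=1, lengthscales=0.5)

# toy data, purely illustrative
X = np.random.rand(20, 1)
Y = np.sin(12 * X) + 0.1 * np.random.randn(20, 1)

m = gpflow.models.GPR(X, Y, kern=k)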


github GPflow / GPflow / tests / test_derivative_kernels.py (View on GitHub)
def test_falls_back_to_default(self):
    kern = gpflow.kernels.Matern52(10)

    deriv_kern = gpflow.derivative_kernel.derivative_kernel_factory(10, 10, kern)

    assert type(deriv_kern) == gpflow.derivative_kernel.DifferentialObservationsKernelDynamic
github GPflow / GPflow / tests / test_session.py (View on GitHub)
def prepare(self):
    with gpflow.defer_build():
        return gpflow.models.GPR(
            np.ones((1, 1)),
            np.ones((1, 1)),
            kern=gpflow.kernels.Matern52(1))
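For context, a model built under defer_build() is typically compiled later, once its parameters have been configured. A minimal sketch, assuming GPflow 1.x:

import numpy as np
import gpflow

with gpflow.defer_build():
    m = gpflow.models.GPR(np.ones((1, 1)), np.ones((1, 1)),
                          kern=gpflow.kernels.Matern52(1))
m.compile()  # builds the TensorFlow graph that defer_build() postponed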
github ICL-SML / Doubly-Stochastic-DGP / tests / test_dgp.py (View on GitHub)
def __init__(self, *args, variance=1., **kwargs):
    Matern52.__init__(self, *args, **kwargs)
    del self.variance
    self.variance = Parameter(variance)
github ICL-SML / Doubly-Stochastic-DGP / tests / test_dgp.py (View on GitHub)
def compare_to_single_layer(self, Y, Ys, lik, L, white, num_outputs=None):
    kern = Matern52(self.X.shape[1], lengthscales=0.5)

    m_svgp = SVGP(self.X, Y, kern, lik, Z=self.X, whiten=white, num_latent=num_outputs)
    m_svgp.q_mu = self.q_mu
    m_svgp.q_sqrt = self.q_sqrt

    L_svgp = m_svgp.compute_log_likelihood()
    mean_svgp, var_svgp = m_svgp.predict_y(self.Xs)
    test_lik_svgp = m_svgp.predict_density(self.Xs, Ys)
    pred_m_svgp, pred_v_svgp = m_svgp.predict_f(self.Xs)
    pred_mfull_svgp, pred_vfull_svgp = m_svgp.predict_f_full_cov(self.Xs)

    kerns = []
    for _ in range(L - 1):
        class NoTransformMatern52(Matern52):
            def __init__(self, *args, variance=1., **kwargs):
github ICL-SML / Doubly-Stochastic-DGP / tests / tests_dgp.py (View on GitHub)
def compare_to_single_layer(self, Y, Ys, lik, init_method):
    kern = Matern52(self.X.shape[1], lengthscales=0.1)

    m_svgp = SVGP(self.X, Y, kern, lik, Z=self.X)
    m_svgp.q_mu = self.q_mu
    m_svgp.q_sqrt = self.q_sqrt

    L_svgp = m_svgp.compute_log_likelihood()
    mean_svgp, var_svgp = m_svgp.predict_y(self.Xs)
    test_lik_svgp = m_svgp.predict_density(self.Xs, Ys)

    m_dgp = DGP(self.X, Y, self.X, [kern], lik, init_layers=init_method)
    m_dgp.layers[0].q_mu = self.q_mu
    m_dgp.layers[0].q_sqrt = self.q_sqrt

    L_dgp = m_dgp.compute_log_likelihood()
    mean_dgp, var_dgp = m_dgp.predict_y(self.Xs, 1)
    test_lik_dgp = m_dgp.predict_density(self.Xs, Ys, 1)
github GPflow / GPflow / unsorted_tests / test_broadcasted_conditionals.py (View on GitHub)
def test_conditional_broadcasting(session_tf, full_cov, white, conditional_type):
    """
    Test that `conditional` and `sample_conditional` broadcast correctly
    over leading dimensions of Xnew. Xnew can be shape [..., N, D],
    and conditional should broadcast over the [...].
    """
    X_ = tf.placeholder(tf.float64, [None, None])
    q_mu = np.random.randn(Data.M, Data.Dy)
    q_sqrt = np.tril(np.random.randn(Data.Dy, Data.M, Data.M), -1)

    if conditional_type == "Z":
        feat = Data.Z
        kern = gpflow.kernels.Matern52(Data.Dx, lengthscale=0.5)
    elif conditional_type == "inducing_points":
        feat = gpflow.features.InducingPoints(Data.Z)
        kern = gpflow.kernels.Matern52(Data.Dx, lengthscale=0.5)
    elif conditional_type == "mixing":
        # variational params have different output dim in this case
        q_mu = np.random.randn(Data.M, Data.L)
        q_sqrt = np.tril(np.random.randn(Data.L, Data.M, Data.M), -1)
        feat = mf.MixedKernelSharedMof(gpflow.features.InducingPoints(Data.Z))
        kern = mk.SeparateMixedMok(
            kernels=[gpflow.kernels.Matern52(Data.Dx, lengthscale=0.5) for _ in range(Data.L)],
            W=Data.W
        )

    if conditional_type == "mixing" and full_cov:
        pytest.skip("combination is not implemented")

    num_samples = 5
    sample_tf, mean_tf, cov_tf = sample_conditional(
        X_,
github GPflow / GPflow / tests / test_kerns.py (View on GitHub)
def setUp(self):
    self.test_graph = tf.Graph()
    with self.test_context():
        k1 = gpflow.kernels.Matern32(2)
        k2 = gpflow.kernels.Matern52(2, lengthscales=0.3)
        k3 = k1 * k2
        self.kernels = [k1, k2, k3]
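A product kernel like k3 above supports the same interface as its factors. As a minimal sketch of evaluating its covariance matrix, assuming GPflow 1.x (the data here is made up):

import numpy as np
import gpflow

k = gpflow.kernels.Matern32(2) * gpflow.kernels.Matern52(2, lengthscales=0.3)
X = np.random.randn(5, 2)
K = k.compute_K_symm(X)  # 5x5 covariance matrix under the product kernel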
github GPflow / GPflow / _unsorted / _test_method_equivalence.py (View on GitHub)
def test_vgp_vs_svgp(self):
    with self.test_context():
        N, Ns, DX, DY = 100, 10, 2, 2
        np.random.seed(1)
        X = np.random.randn(N, DX)
        Xs = np.random.randn(Ns, DX)
        Y = np.random.randn(N, DY)

        kernel = gpflow.kernels.Matern52(DX)
        likelihood = gpflow.likelihoods.StudentT()

        m_svgp = gpflow.models.SVGP(
            X, Y, kernel, likelihood, X.copy(), whiten=True, q_diag=False)
        m_vgp = gpflow.models.VGP(X, Y, kernel, likelihood)

        m_svgp.compile()
        m_vgp.compile()

        q_mu = np.random.randn(N, DY)
        q_sqrt = np.random.randn(DY, N, N)

        m_svgp.q_mu = q_mu
        m_svgp.q_sqrt = q_sqrt

        m_vgp.q_mu = q_mu
github ICL-SML / Doubly-Stochastic-DGP / tests / test_zoo_models.py (View on GitHub)
def test_vs_DGP2(self):
    lik = Gaussian()
    lik_var = 0.1
    lik.variance = lik_var
    N, Ns, D_Y, D_X = self.X.shape[0], self.Xs.shape[0], self.D_Y, self.X.shape[1]

    q_mu = np.random.randn(N, D_X)

    Y = np.random.randn(N, D_Y)
    Ys = np.random.randn(Ns, D_Y)

    kern1 = Matern52(self.X.shape[1], lengthscales=0.5)
    kern2 = Matern52(self.X.shape[1], lengthscales=0.5)
    kerns = [kern1, kern2]
    # mf = Linear(A=np.random.randn(D_X, D_Y), b=np.random.randn(D_Y))

    mf = Zero()
    m_dgp = DGP(self.X, Y, self.X, kerns, lik, mean_function=mf, white=True)
    m_dgp.layers[0].q_mu = q_mu
    m_dgp.layers[0].q_sqrt = m_dgp.layers[0].q_sqrt.read_value() * 1e-24

    Fs, ms, vs = m_dgp.predict_all_layers(self.Xs, 1)
    Z = self.X.copy()
    Z[:len(self.Xs)] = ms[0][0]
    m_dgp.layers[1].feature.Z = Z  # need to put the inducing points in the right place

    var_list = [[m_dgp.layers[1].q_mu, m_dgp.layers[1].q_sqrt]]
    NatGradOptimizer(gamma=1).minimize(m_dgp, var_list=var_list, maxiter=1)
github GPflow / GPflow / doc / source / notebooks / simple_regression.py (View on GitHub)
def getRegressionModel(X, Y):
    # build the GPR object
    k = gpflow.kernels.Matern52(1)
    meanf = gpflow.mean_functions.Linear(1, 0)
    m = gpflow.models.GPR(X, Y, k, meanf)
    m.likelihood.variance = 0.01
    print("Here are the parameters before optimization")
    print(m)
    return m
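To round out the workflow, here is a hypothetical call to the function above, with made-up data and GPflow 1.x's ScipyOptimizer; the data and values are assumptions for illustration only.

import numpy as np
import gpflow

X = np.linspace(0, 1, 30)[:, None]
Y = np.sin(10 * X) + 0.1 * np.random.randn(30, 1)

m = getRegressionModel(X, Y)
gpflow.train.ScipyOptimizer().minimize(m)  # fit kernel and likelihood parameters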