How to use the gpflow.train.ScipyOptimizer class in gpflow

To help you get started, we’ve selected a few gpflow examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github GPflow / GPflow / _unsorted / _test_gplvm.py View on Github external
def test_otherkernel(self):
        """Optimising a GPLVM with a Periodic kernel should improve the log likelihood."""
        with self.test_context():
            kernel = kernels.Periodic(self.Q)
            x_init = self.rng.rand(self.N, self.Q)
            model = gpflow.models.GPLVM(self.Y, self.Q, kernel=kernel, x_data_mean=x_init)
            ll_before = model.compute_log_likelihood()
            # A couple of optimizer steps are enough to move the objective.
            gpflow.train.ScipyOptimizer().minimize(model, maxiter=2)
            self.assertTrue(model.compute_log_likelihood() > ll_before)
github GPflow / GPflow / tests / test_coregion.py View on Github external
def setup(self):
        """Fit the two independent VGPs and the coregionalised VGP, then stash them on self."""
        vgp0, vgp1, cvgp, Xtest = self.prepare()
        # Each model gets its own freshly-constructed optimizer, as before.
        for model in (vgp0, vgp1, cvgp):
            gpflow.train.ScipyOptimizer().minimize(model, maxiter=50)
        self.Xtest = Xtest
        self.vgp0 = vgp0
        self.vgp1 = vgp1
        self.cvgp = cvgp
github GPflow / GPflow / tests / test_priors.py View on Github external
def testGammaMode(self):
        """The MAP estimate of a Gamma-prior parameter should land on the analytic mode."""
        with self.test_context():
            m = self.prepare()
            m.x = gpflow.Param(1.0, autobuild=False)
            shape, scale = 4., 5.
            m.x.prior = gpflow.priors.Gamma(shape, scale)

            m.compile()
            gpflow.train.ScipyOptimizer().minimize(m)

            # Mode of Gamma(shape, scale) is (shape - 1) * scale for shape >= 1.
            expected_mode = (shape - 1.) * scale
            assert_allclose(m.x.read_value(), expected_mode, 1e-3)
github GPflow / GPflow / tests / test_coregion.py View on Github external
def setup(self):
        """Train each of the three models with its own ScipyOptimizer and keep the results."""
        vgp0, vgp1, cvgp, Xtest = self.prepare()
        # One optimizer instance per model, paired up and run in the original order.
        optimizers = [gpflow.train.ScipyOptimizer() for _ in range(3)]
        for optimizer, model in zip(optimizers, (vgp0, vgp1, cvgp)):
            optimizer.minimize(model, maxiter=50)
        self.Xtest = Xtest
        self.vgp0 = vgp0
        self.vgp1 = vgp1
        self.cvgp = cvgp
github GPflow / GPflow / tests / test_coregion.py View on Github external
def setup(self):
        """Optimise the independent and coregionalised models, then store them for the tests."""
        vgp0, vgp1, cvgp, Xtest = self.prepare()
        models = (vgp0, vgp1, cvgp)
        # Fresh optimizer per model, same 50-iteration budget as before.
        for model in models:
            opt = gpflow.train.ScipyOptimizer()
            opt.minimize(model, maxiter=50)
        self.Xtest = Xtest
        self.vgp0, self.vgp1, self.cvgp = models
github GPflow / GPflow / tests / test_method_equivalence.py View on Github external
def test_all(self):
        """After optimisation, all equivalent models should agree on likelihood and RBF hyperparameters."""
        with self.test_context() as session:
            models = self.prepare()

            # Optimise each model and record its (positive) log likelihood.
            likelihoods = []
            for model in models:
                gpflow.train.ScipyOptimizer().minimize(model, maxiter=300)
                likelihoods.append(session.run(tf.negative(model.objective)).squeeze())
            assert_allclose(likelihoods, likelihoods[0], rtol=1e-6)

            # Collect the RBF hyperparameters, unwrapping composite kernels when present.
            variances, lengthscales = [], []
            for model in models:
                kern = model.kern.rbf if hasattr(model.kern, 'rbf') else model.kern
                variances.append(kern.variance.read_value())
                lengthscales.append(kern.lengthscales.read_value())
            variances = np.array(variances)
            lengthscales = np.array(lengthscales)
            assert_allclose(variances, variances[0], 1e-5)
            assert_allclose(lengthscales, lengthscales.mean(), 1e-4)
            mu0, var0 = models[0].predict_y(self.Xtest)
github GPflow / GPflow / tests / test_profiling.py View on Github external
def test_profile(self):
        """Optimising with timeline dumping enabled should write a profile JSON file."""
        m = self.prepare()
        settings = gpflow.settings.get_settings()
        settings.profiling.dump_timeline = True
        settings.profiling.output_directory = tf.test.get_temp_dir()
        settings.profiling.output_file_name = 'test_trace_profile'

        # Run a short optimisation under the profiling settings.
        with gpflow.settings.temp_settings(settings):
            with gpflow.session_manager.get_session().as_default():
                m.compile()
                gpflow.train.ScipyOptimizer().minimize(m, maxiter=10)

        trace_path = os.path.join(settings.profiling.output_directory,
                                  settings.profiling.output_file_name + '.json')
        self.assertTrue(os.path.exists(trace_path))
        os.remove(trace_path)  # clean up the dumped timeline
github jameshensman / VFF / experiments / airline / airline_additive_figure.py View on Github external
fig, axes = plt.subplots(2, 4, figsize=(16, 5))  # NOTE(review): looks like the body of a plotting function whose `def` line (and this line's indentation) was lost in extraction — confirm against the original file
    # Dense test grid on the normalised [0, 1] input scale.
    Xtest = np.linspace(0, 1, 200)[:, None]
    mu, var = m.predict_components(Xtest)
    # One subplot per additive component returned by predict_components.
    for i in range(mu.shape[1]):
        ax = axes.flatten()[i]
        # Map the normalised inputs back to the original data range for plotting.
        # assumes Xmin/Xmax hold the per-dimension scaling bounds — TODO confirm
        Xplot = Xtest * (Xmax[i] - Xmin[i]) + Xmin[i]
        ax.plot(Xplot, mu[:, i], lw=2, color='C0')
        # Dashed lines at mean +/- 2 standard deviations.
        ax.plot(Xplot, mu[:, i] + 2*np.sqrt(var[:, i]), 'C0--', lw=1)
        ax.plot(Xplot, mu[:, i] - 2*np.sqrt(var[:, i]), 'C0--', lw=1)
        ax.set_title(names[i])


if __name__ == '__main__':
    # Build the additive VFF GPR: one Matern-3/2 kernel per input dimension,
    # with per-dimension interval bounds of -2 and +2 on the (scaled) inputs.
    # NOTE(review): np.arange(30) presumably selects the Fourier frequencies —
    # verify against the vff.gpr.GPR_additive signature.
    m = vff.gpr.GPR_additive(X, Y, np.arange(30), np.zeros(X.shape[1]) - 2, np.ones(X.shape[1]) + 2,
                             [gpflow.kernels.Matern32(1) for i in range(X.shape[1])])
    # Fit the model to convergence with SciPy's optimizer (default maxiter).
    opt = gpflow.train.ScipyOptimizer()
    opt.minimize(m)

    plot(m)

    plt.show()
github GPflow / GPflow / doc / source / notebooks / ep_vs_vgp_binary_classification_demo.py View on Github external
USE_ADAM = False  # NOTE(review): mid-script fragment; the enclosing function and its continuation are not visible here, and this line's indentation appears lost in extraction

    # VGP Model:
    print("Running VGP model.")
    # Variational GP binary classifier: 2-d RBF kernel with a Bernoulli likelihood.
    m = gpflow.models.VGP(Xtrain, Ytrain,
                                kern=gpflow.kernels.RBF(2),
                                likelihood=gpflow.likelihoods.Bernoulli())
    if not TRAIN_KERNEL:
        # Freeze the kernel hyperparameters so only the variational parameters are optimised.
        m.kern.lengthscales.set_trainable(False)
        m.kern.variance.set_trainable(False)
    m.compile()
    print("VGP model's initial model log likelihood: {}".format(m.compute_log_likelihood()))
    # Choose the optimiser: Adam for 500 iterations, or SciPy (L-BFGS-style) capped at 100.
    if USE_ADAM:
        gpflow.train.AdamOptimizer().minimize(m, maxiter=500)
    else:
        gpflow.train.ScipyOptimizer(options=dict(maxiter=100)).minimize(m)
    plot(m, axarr[0])
    print("VGP model's final model log likelihood: {}".format(m.compute_log_likelihood()))
    print("VGP model's final kernel variance: {}".format(m.kern.variance.read_value()))
    print("VGP model's final kernel lengthscale: {}".format(m.kern.lengthscales.read_value()))
    print("=================================\n\n")

    # EP Binary Classification Model:
    print("Running Binary EP Classification Model")
    sess = tf.Session()
    #sess = tf_debug.LocalCLIDebugWrapperSession(sess)
    # ^ if debugging run with this uncommented and ` python -m ep_classification_demo --debug`
    # Expectation-propagation approximation to the same RBF/Bernoulli model.
    m2 = ep_approximated_like.EPLikeApproxGP(Xtrain, Ytrain, kern=gpflow.kernels.RBF(2),
                                             likelihood=gpflow.likelihoods.Bernoulli(),
                                             use_cache_on_like=False)
    m2.compile(session=sess)
    tau_tilde, nu_tilde, num_iter = m2.run_ep()