def test_otherkernel(self):
    with self.test_context():
        k = kernels.Periodic(self.Q)
        XInit = self.rng.rand(self.N, self.Q)
        m = gpflow.models.GPLVM(self.Y, self.Q, kernel=k, x_data_mean=XInit)
        linit = m.compute_log_likelihood()
        opt = gpflow.train.ScipyOptimizer()
        # Even a couple of optimizer iterations should improve the likelihood.
        opt.minimize(m, maxiter=2)
        self.assertTrue(m.compute_log_likelihood() > linit)
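# These snippets all share one GPflow 1.x training pattern: build a model,
# (optionally) compile it, then hand it to gpflow.train.ScipyOptimizer. Below
# is a minimal self-contained sketch of that pattern, assuming GPflow 1.x; the
# toy data and GPR model are illustrative, not taken from any of these tests.
import numpy as np
import gpflow

rng = np.random.RandomState(0)
X = rng.rand(20, 1)
Y = np.sin(10 * X) + 0.1 * rng.randn(20, 1)

m = gpflow.models.GPR(X, Y, kern=gpflow.kernels.RBF(1))
before = m.compute_log_likelihood()
opt = gpflow.train.ScipyOptimizer()  # wraps scipy's L-BFGS-B by default
opt.minimize(m, maxiter=100)
assert m.compute_log_likelihood() >= before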
def setup(self):
    vgp0, vgp1, cvgp, Xtest = self.prepare()
    # Train each model with its own optimizer instance.
    opt1 = gpflow.train.ScipyOptimizer()
    opt2 = gpflow.train.ScipyOptimizer()
    opt3 = gpflow.train.ScipyOptimizer()
    opt1.minimize(vgp0, maxiter=50)
    opt2.minimize(vgp1, maxiter=50)
    opt3.minimize(cvgp, maxiter=50)
    self.Xtest = Xtest
    self.vgp0 = vgp0
    self.vgp1 = vgp1
    self.cvgp = cvgp
def testGammaMode(self):
    with self.test_context():
        m = self.prepare()
        m.x = gpflow.Param(1.0, autobuild=False)
        shape, scale = 4., 5.
        m.x.prior = gpflow.priors.Gamma(shape, scale)
        m.compile()
        opt = gpflow.train.ScipyOptimizer()
        opt.minimize(m)
        # With no data term, the optimum is the prior mode.
        true_mode = (shape - 1.) * scale
        assert_allclose(m.x.read_value(), true_mode, 1e-3)
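# The target above is the prior mode: with no data term, optimization finds
# the MAP point, and a Gamma(shape, scale) density, p(x) proportional to
# x**(shape - 1) * exp(-x / scale), peaks at (shape - 1) * scale for
# shape >= 1. A standalone numerical sanity check of that formula using
# scipy (not part of the test suite):
import numpy as np
from scipy.stats import gamma

shape, scale = 4., 5.
xs = np.linspace(1e-3, 100., 100001)
mode_numeric = xs[np.argmax(gamma.pdf(xs, a=shape, scale=scale))]
assert np.isclose(mode_numeric, (shape - 1.) * scale, atol=1e-2)  # mode = 15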
def test_all(self):
    with self.test_context() as session:
        models = self.prepare()
        likelihoods = []
        for m in models:
            opt = gpflow.train.ScipyOptimizer()
            opt.minimize(m, maxiter=300)
            neg_obj = tf.negative(m.objective)
            likelihoods.append(session.run(neg_obj).squeeze())
        # All parameterizations should reach the same optimum.
        assert_allclose(likelihoods, likelihoods[0], rtol=1e-6)
        variances, lengthscales = [], []
        for m in models:
            if hasattr(m.kern, 'rbf'):
                variances.append(m.kern.rbf.variance.read_value())
                lengthscales.append(m.kern.rbf.lengthscales.read_value())
            else:
                variances.append(m.kern.variance.read_value())
                lengthscales.append(m.kern.lengthscales.read_value())
        variances, lengthscales = np.array(variances), np.array(lengthscales)
        assert_allclose(variances, variances[0], 1e-5)
        assert_allclose(lengthscales, lengthscales.mean(), 1e-4)
        mu0, var0 = models[0].predict_y(self.Xtest)
def test_profile(self):
    m = self.prepare()
    s = gpflow.settings.get_settings()
    # Route the TensorFlow timeline dump to a temporary directory.
    s.profiling.dump_timeline = True
    s.profiling.output_directory = tf.test.get_temp_dir()
    s.profiling.output_file_name = 'test_trace_profile'
    with gpflow.settings.temp_settings(s):
        with gpflow.session_manager.get_session().as_default():
            m.compile()
            opt = gpflow.train.ScipyOptimizer()
            opt.minimize(m, maxiter=10)
    expected_file = os.path.join(s.profiling.output_directory,
                                 s.profiling.output_file_name + '.json')
    self.assertTrue(os.path.exists(expected_file))
    os.remove(expected_file)
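# The dump produced above is in Chrome trace-event format (the same JSON that
# chrome://tracing loads). A sketch of inspecting it offline; the file name
# here assumes the settings used in test_profile:
import json

with open('test_trace_profile.json') as f:
    trace = json.load(f)
# Complete events carry their duration in microseconds under 'dur'.
slowest = sorted((e for e in trace.get('traceEvents', []) if 'dur' in e),
                 key=lambda e: e['dur'], reverse=True)
for event in slowest[:5]:
    print(event.get('name', '?'), event['dur'], 'us')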
def plot(m):
    # Assumes Xmin, Xmax, and names are defined by the enclosing script.
    fig, axes = plt.subplots(2, 4, figsize=(16, 5))
    Xtest = np.linspace(0, 1, 200)[:, None]
    mu, var = m.predict_components(Xtest)
    for i in range(mu.shape[1]):
        ax = axes.flatten()[i]
        # Undo the [0, 1] input scaling before plotting.
        Xplot = Xtest * (Xmax[i] - Xmin[i]) + Xmin[i]
        ax.plot(Xplot, mu[:, i], lw=2, color='C0')
        # Draw the +/- 2 standard deviation band as dashed lines.
        ax.plot(Xplot, mu[:, i] + 2 * np.sqrt(var[:, i]), 'C0--', lw=1)
        ax.plot(Xplot, mu[:, i] - 2 * np.sqrt(var[:, i]), 'C0--', lw=1)
        ax.set_title(names[i])
if __name__ == '__main__':
    m = vff.gpr.GPR_additive(X, Y, np.arange(30),
                             np.zeros(X.shape[1]) - 2,
                             np.ones(X.shape[1]) + 2,
                             [gpflow.kernels.Matern32(1) for _ in range(X.shape[1])])
    opt = gpflow.train.ScipyOptimizer()
    opt.minimize(m)
    plot(m)
    plt.show()
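# For context: GPR_additive comes from the VFF (variational Fourier features)
# codebase. A roughly equivalent additive prior can be sketched in plain
# GPflow 1.x by summing one-dimensional kernels over active_dims; this is an
# illustrative alternative, not the VFF implementation.
import functools
import operator
import gpflow

def additive_matern32(input_dim):
    # One Matern-3/2 kernel per input column; '+' on GPflow kernels builds a Sum.
    kerns = [gpflow.kernels.Matern32(1, active_dims=[i]) for i in range(input_dim)]
    return functools.reduce(operator.add, kerns)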
USE_ADAM = False

# VGP Model:
print("Running VGP model.")
m = gpflow.models.VGP(Xtrain, Ytrain,
                      kern=gpflow.kernels.RBF(2),
                      likelihood=gpflow.likelihoods.Bernoulli())
if not TRAIN_KERNEL:
    m.kern.lengthscales.set_trainable(False)
    m.kern.variance.set_trainable(False)
m.compile()
print("VGP model's initial log likelihood: {}".format(m.compute_log_likelihood()))
if USE_ADAM:
    gpflow.train.AdamOptimizer().minimize(m, maxiter=500)
else:
    gpflow.train.ScipyOptimizer(options=dict(maxiter=100)).minimize(m)
plot(m, axarr[0])
print("VGP model's final log likelihood: {}".format(m.compute_log_likelihood()))
print("VGP model's final kernel variance: {}".format(m.kern.variance.read_value()))
print("VGP model's final kernel lengthscale: {}".format(m.kern.lengthscales.read_value()))
print("=================================\n\n")
# EP Binary Classification Model:
print("Running Binary EP Classification Model")
sess = tf.Session()
# sess = tf_debug.LocalCLIDebugWrapperSession(sess)
# ^ For debugging, uncomment the line above and run with
#   `python -m ep_classification_demo --debug`.
m2 = ep_approximated_like.EPLikeApproxGP(Xtrain, Ytrain, kern=gpflow.kernels.RBF(2),
                                         likelihood=gpflow.likelihoods.Bernoulli(),
                                         use_cache_on_like=False)
m2.compile(session=sess)
tau_tilde, nu_tilde, num_iter = m2.run_ep()