vgp1 = gpflow.models.VGP((Datum.X[1], Datum.Y[1]),
                         kernel=k1,
                         mean_function=Constant(),
                         likelihood=gpflow.likelihoods.Gaussian(), num_latent=1)
# 2. Coregionalized GPR
kc = gpflow.kernels.SquaredExponential(active_dims=[0, 1])
kc.lengthscale.trainable = False
kc.variance.trainable = False  # variance is handled by the coregion kernel
coreg = gpflow.kernels.Coregion(output_dim=2, rank=1, active_dims=[2])
coreg.W.trainable = False
lik = gpflow.likelihoods.SwitchedLikelihood([gpflow.likelihoods.Gaussian(),
                                             gpflow.likelihoods.Gaussian()])
mean_c = gpflow.mean_functions.SwitchedMeanFunction(
    [gpflow.mean_functions.Constant(), gpflow.mean_functions.Constant()])
cvgp = gpflow.models.VGP((Datum.X_augumented, Datum.Y_augumented),
                         kernel=kc * coreg,
                         mean_function=mean_c,
                         likelihood=lik,
                         num_latent=1)
# Train them for a small number of iterations
opt = gpflow.optimizers.Scipy()

@tf.function(autograph=False)
def vgp0_closure():
    return - vgp0.log_marginal_likelihood()

@tf.function(autograph=False)
def vgp1_closure():
    return - vgp1.log_marginal_likelihood()
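
# --- Added illustration (not part of the original snippet): a minimal sketch
# of how the closures above might be driven, assuming the GPflow 2.x
# Scipy.minimize(closure, variables, options=...) API used in this file.
# The iteration counts are arbitrary.
opt.minimize(vgp0_closure, vgp0.trainable_variables, options=dict(maxiter=3))
opt.minimize(vgp1_closure, vgp1.trainable_variables, options=dict(maxiter=3))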
def _prepare_models():
    """
    Prepare models to make sure the coregionalized model with diagonal coregion kernel and
    with fixed lengthscale is equivalent with normal GP regression.
    """
    Xtest = rng.rand(10, 2) * 10
    X_augumented = np.hstack([np.concatenate(X), np.concatenate(label)])
    Y_augumented = np.hstack([np.concatenate(Y), np.concatenate(label)])
    # 1. Two independent VGPs for two sets of data
    k0 = gpflow.kernels.RBF(2)
    k0.lengthscales.trainable = False
    vgp0 = gpflow.models.VGP(
        X[0], Y[0], kern=k0,
        mean_function=gpflow.mean_functions.Constant(),
        likelihood=gpflow.likelihoods.Gaussian())
    k1 = gpflow.kernels.RBF(2)
    k1.lengthscales.trainable = False
    vgp1 = gpflow.models.VGP(
        X[1], Y[1], kern=k1,
        mean_function=gpflow.mean_functions.Constant(),
        likelihood=gpflow.likelihoods.Gaussian())
    # 2. Coregionalized GPR
    lik = gpflow.likelihoods.SwitchedLikelihood(
        [gpflow.likelihoods.Gaussian(), gpflow.likelihoods.Gaussian()])
    kc = gpflow.kernels.RBF(2)
    kc.trainable = False  # lengthscale and variance are fixed
    coreg = gpflow.kernels.Coregion(1, output_dim=2, rank=1, active_dims=[2])
    coreg.W.trainable = False
    mean_c = gpflow.mean_functions.SwitchedMeanFunction(
        [gpflow.mean_functions.Constant(), gpflow.mean_functions.Constant()])
    cvgp = gpflow.models.VGP(
        X_augumented, Y_augumented,
        kern=kc * coreg,
        mean_function=mean_c,
        likelihood=lik,
        num_latent=2)
    return vgp0, vgp1, cvgp, Xtest
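
# --- Added illustration (not part of the original tests): the coregionalized
# model consumes "augmented" arrays whose last column is the output index.
# Coregion(active_dims=[2]) and the SwitchedLikelihood both switch on that
# column. A hypothetical two-point example:
import numpy as np
X_demo = np.array([[0.1, 0.5, 0.],    # input for output 0
                   [0.7, 0.2, 1.]])   # input for output 1
Y_demo = np.array([[1.3, 0.],         # observation, then its output index
                   [0.4, 1.]])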
def gpr_and_vgp(data, kernel, likelihood):
    vgp = gpflow.models.VGP(data, kernel, likelihood)
    gpr = gpflow.models.GPR(data, kernel)
    gpr.likelihood.variance.assign(likelihood.variance)
    set_trainable(vgp, False)
    vgp.q_mu.trainable = True
    vgp.q_sqrt.trainable = True
    return gpr, vgp
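
# --- Added usage sketch (not part of the original helper; numpy and gpflow
# imports assumed): with everything except q_mu/q_sqrt frozen and the
# likelihood variances matched, optimizing the VGP's variational parameters
# should reproduce the exact GPR posterior. The data below are placeholders.
X, Y = np.random.randn(6, 1), np.random.randn(6, 1)
gpr, vgp = gpr_and_vgp((X, Y), gpflow.kernels.SquaredExponential(),
                       gpflow.likelihoods.Gaussian())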
def prepare(self):
    return gpflow.models.VGP(
        self.X, self.Y, kern=self.kernel(),
        likelihood=gpflow.likelihoods.Gaussian())
def _prepare_models():
    """
    Prepare models to make sure the coregionalized model with diagonal coregion kernel and
    with fixed lengthscale is equivalent with normal GP regression.
    """
    # 1. Two independent VGPs for two sets of data
    k0 = gpflow.kernels.SquaredExponential()
    k0.lengthscale.trainable = False
    k1 = gpflow.kernels.SquaredExponential()
    k1.lengthscale.trainable = False
    vgp0 = gpflow.models.VGP((Datum.X[0], Datum.Y[0]),
                             kernel=k0,
                             mean_function=Constant(),
                             likelihood=gpflow.likelihoods.Gaussian(), num_latent=1)
    vgp1 = gpflow.models.VGP((Datum.X[1], Datum.Y[1]),
                             kernel=k1,
                             mean_function=Constant(),
                             likelihood=gpflow.likelihoods.Gaussian(), num_latent=1)
    # 2. Coregionalized GPR
    kc = gpflow.kernels.SquaredExponential(active_dims=[0, 1])
    kc.lengthscale.trainable = False
    kc.variance.trainable = False  # variance is handled by the coregion kernel
    coreg = gpflow.kernels.Coregion(output_dim=2, rank=1, active_dims=[2])
    coreg.W.trainable = False
    lik = gpflow.likelihoods.SwitchedLikelihood([gpflow.likelihoods.Gaussian(),
                                                 gpflow.likelihoods.Gaussian()])
    mean_c = gpflow.mean_functions.SwitchedMeanFunction(
        [gpflow.mean_functions.Constant(), gpflow.mean_functions.Constant()])
    cvgp = gpflow.models.VGP((Datum.X_augumented, Datum.Y_augumented),
                             kernel=kc * coreg,
                             mean_function=mean_c,
                             likelihood=lik,
                             num_latent=1)
def test_correct_num_latent(self):
    with self.test_context():
        m = gpflow.models.VGP(self.X, self.Y, kern=gpflow.kernels.Matern12(1),
                              likelihood=self.switched_likelihood, num_latent=1)
        m.compute_log_likelihood()  # should compute something!
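
# --- Added note (not part of the original test): with a SwitchedLikelihood
# the trailing column of Y holds the likelihood index rather than an
# observation, so only one latent function is needed (num_latent=1).
# A hypothetical layout for self.Y:
import numpy as np
Y_switched = np.hstack([np.random.randn(10, 1),             # observations
                        np.random.randint(0, 2, (10, 1))])  # likelihood index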
def test_other_XiTransform_VGP_vs_GPR(session_tf, xi_transform=XiSqrtMeanVar()):
    """
    With other transforms the solution is not given in a single step, but it
    should still give the same answer after a number of smaller steps.
    """
    N, D = 3, 2
    X = np.random.randn(N, D)
    Y = np.random.randn(N, 1)
    kern = gpflow.kernels.RBF(D)
    lik_var = 0.1
    lik = gpflow.likelihoods.Gaussian()
    lik.variance = lik_var
    m_vgp = gpflow.models.VGP(X, Y, kern, lik)
    m_gpr = gpflow.models.GPR(X, Y, kern)
    m_gpr.likelihood.variance = lik_var
    m_vgp.set_trainable(False)
    m_vgp.q_mu.set_trainable(True)
    m_vgp.q_sqrt.set_trainable(True)
    NatGradOptimizer(0.01).minimize(m_vgp, [[m_vgp.q_mu, m_vgp.q_sqrt, xi_transform]], maxiter=500)
    assert_allclose(m_gpr.compute_log_likelihood(),
                    m_vgp.compute_log_likelihood(), atol=1e-4)
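
# --- Added contrast (not part of the original test): in the natural
# parameterization a single full step (gamma = 1.0) is expected to land on
# the exact GPR solution for a Gaussian likelihood, which is why the
# transformed variants above need many small steps instead. A sketch,
# assuming the same GPflow 1.x API as above:
# NatGradOptimizer(1.0).minimize(m_vgp, [[m_vgp.q_mu, m_vgp.q_sqrt]], maxiter=1)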
def test_vgp_vs_svgp(self):
    with self.test_context():
        N, Ns, DX, DY = 100, 10, 2, 2
        np.random.seed(1)
        X = np.random.randn(N, DX)
        Xs = np.random.randn(Ns, DX)
        Y = np.random.randn(N, DY)
        kernel = gpflow.kernels.Matern52(DX)
        likelihood = gpflow.likelihoods.StudentT()
        m_svgp = gpflow.models.SVGP(
            X, Y, kernel, likelihood, X.copy(), whiten=True, q_diag=False)
        m_vgp = gpflow.models.VGP(X, Y, kernel, likelihood)
        m_svgp.compile()
        m_vgp.compile()
        q_mu = np.random.randn(N, DY)
        q_sqrt = np.random.randn(DY, N, N)
        m_svgp.q_mu = q_mu
        m_svgp.q_sqrt = q_sqrt
        m_vgp.q_mu = q_mu
        m_vgp.q_sqrt = q_sqrt
        L_svgp = m_svgp.compute_log_likelihood()
        L_vgp = m_vgp.compute_log_likelihood()
        assert_allclose(L_svgp, L_vgp, rtol=1e-2)
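
# --- Added note (not part of the original test): with the inducing inputs set
# to X itself and whiten=True, SVGP's q(u) and VGP's whitened q(f) describe
# the same variational family, so with identical (q_mu, q_sqrt) the two bounds
# should coincide; the rtol=1e-2 tolerance absorbs quadrature and
# floating-point differences.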