# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_nongpr_model(self, domain):
    """Bayesian optimization must also work with a non-GPR model (here a VGP)."""
    lhd = gpflowopt.design.LatinHyperCube(16, domain)
    X = lhd.generate()
    Y = parabola2d(lhd.generate())
    model = gpflow.models.VGP(X, Y, gpflow.kernels.RBF(2, ARD=True),
                              likelihood=gpflow.likelihoods.Gaussian())
    ei = gpflowopt.acquisition.ExpectedImprovement(model)
    bo = gpflowopt.BayesianOptimizer(domain, ei,
                                     optimizer=gpflowopt.optim.SciPyOptimizer(domain))
    outcome = bo.optimize(lambda X: parabola2d(X), n_iter=1)
    assert outcome.success
def test_deriv_rbf_kernel_x1_and_x2_different_lengthscales(self):
    # this test is mainly about testing our rbf derivative kernel implementation
    # when the lengthscales vary along the different dimensions.
    # to do this we test the result against the basic derivative kernel
    # where the gradients are calculated via tf.gradients.
    # NOTE(review): the visible portion only builds the two kernels and the
    # graph for the analytic kernel; the evaluation/comparison presumably
    # follows — confirm against the full file.
    # Input placeholders: 2 data dimensions plus extra columns, presumably
    # encoding which derivative is observed per row — TODO confirm layout.
    x_ph = tf.placeholder(tf.float64, [None, 4])
    x2_ph = tf.placeholder(tf.float64, [None, 4])
    # Distinct per-dimension lengthscales to exercise the ARD code path.
    lengthscales = np.array([1.8, 0.9])
    # Two identical base kernels so each derivative kernel owns its own copy.
    base_rbf_kern1 = gpflow.kernels.RBF(2, self.variance, lengthscales=lengthscales,
                                        ARD=True)
    base_rbf_kern2 = gpflow.kernels.RBF(2, self.variance, lengthscales=lengthscales,
                                        ARD=True)
    # Reference implementation: derivatives obtained via tf.gradients.
    diff_dynamic_kernel = gpflow.derivative_kernel.DifferentialObservationsKernelDynamic(
        2, base_rbf_kern1, 2
    )
    # Implementation under test: hand-coded analytic RBF derivative kernel.
    diff_kernel = gpflow.derivative_kernel.RBFDerivativeKern(2, 2, base_kernel=base_rbf_kern2)
    with self.test_session() as sess:
        with diff_kernel.tf_mode():
            # GPflow 0.x parameter plumbing: bind the free-parameter vector
            # before building the kernel graph.
            x_free = tf.placeholder('float64')
            diff_kernel.make_tf_array(x_free)
            k = diff_kernel.K(x_ph, x2_ph)
        with diff_dynamic_kernel.tf_mode():
            x_free_2 = tf.placeholder('float64')
            diff_dynamic_kernel.make_tf_array(x_free_2)
def create_parabola_model(domain, design=None):
    """Build a GPR model of the 2-d parabola over ``domain``.

    When no experimental design is supplied, a 16-point Latin hypercube
    over the domain is generated and used for the training data.
    """
    chosen = gpflowopt.design.LatinHyperCube(16, domain) if design is None else design
    X = chosen.generate()
    Y = parabola2d(chosen.generate())
    return gpflow.gpr.GPR(X, Y, gpflow.kernels.RBF(2, ARD=True))
def test_mixed_mok_with_Id_vs_independent_mok(session_tf):
    """A SeparateMixedMok with mixing matrix W must agree with a SharedIndependentMok.

    Both SVGP models are trained with everything fixed except q_sqrt, then
    their predictions are checked for equality.
    """
    data = DataMixedKernelWithEye

    # --- independent multi-output model ---
    shared_kernel = mk.SharedIndependentMok(
        RBF(data.D, variance=0.5, lengthscales=1.2), data.L)
    feat_indep = InducingPoints(data.X[:data.M, ...].copy())
    model_indep = SVGP(data.X, data.Y, shared_kernel, Gaussian(), feat_indep,
                       q_mu=data.mu_data_full, q_sqrt=data.sqrt_data_full)
    model_indep.set_trainable(False)
    model_indep.q_sqrt.set_trainable(True)
    gpflow.training.ScipyOptimizer().minimize(model_indep, maxiter=data.MAXITER)

    # --- mixed multi-output model ---
    kernels = [RBF(data.D, variance=0.5, lengthscales=1.2) for _ in range(data.L)]
    mixed_kernel = mk.SeparateMixedMok(kernels, data.W)
    feat_mixed = InducingPoints(data.X[:data.M, ...].copy())
    model_mixed = SVGP(data.X, data.Y, mixed_kernel, Gaussian(), feat_mixed,
                       q_mu=data.mu_data_full, q_sqrt=data.sqrt_data_full)
    model_mixed.set_trainable(False)
    model_mixed.q_sqrt.set_trainable(True)
    gpflow.training.ScipyOptimizer().minimize(model_mixed, maxiter=data.MAXITER)

    check_equality_predictions(session_tf, [model_indep, model_mixed])
# NOTE(review): this span appears to be a detached fragment of a `prepare`
# method (it duplicates the body that appears further below); the names
# `rng`, `X`, `Y` and `label` are not defined in the visible scope —
# confirm against the original file.
perm = list(range(30))
rng.shuffle(perm)
Xtest = rng.rand(10, 2) * 10
# Stack both data sets; the trailing column carries the task label so a
# switched/coregionalized model can route each row.
X_augumented = np.hstack([np.concatenate(X), np.concatenate(label)])
Y_augumented = np.hstack([np.concatenate(Y), np.concatenate(label)])
# 1. Two independent VGPs for two sets of data
k0 = gpflow.kernels.RBF(2)
k0.lengthscales.trainable = False  # hold lengthscale fixed so models stay comparable
vgp0 = gpflow.models.VGP(
    X[0], Y[0], kern=k0,
    mean_function=gpflow.mean_functions.Constant(),
    likelihood=gpflow.likelihoods.Gaussian())
k1 = gpflow.kernels.RBF(2)
k1.lengthscales.trainable = False
vgp1 = gpflow.models.VGP(
    X[1], Y[1], kern=k1,
    mean_function=gpflow.mean_functions.Constant(),
    likelihood=gpflow.likelihoods.Gaussian())
# 2. Coregionalized GPR
# One Gaussian likelihood per output, selected by the label column.
lik = gpflow.likelihoods.SwitchedLikelihood(
    [gpflow.likelihoods.Gaussian(), gpflow.likelihoods.Gaussian()])
kc = gpflow.kernels.RBF(2)
kc.trainable = False  # lengthscale and variance is fixed.
# Coregionalization over the label column (active_dims=[2]).
coreg = gpflow.kernels.Coregion(1, output_dim=2, rank=1, active_dims=[2])
coreg.W.trainable = False
def prepare(self):
    """Build GPR/VGP/SVGP/SGPR models on one shared toy regression data set.

    Every model uses an RBF kernel with a constant mean function; the
    sparse models put their inducing points on the training inputs and
    keep them fixed. (Visible portion ends after m5 is constructed.)
    """
    rng = np.random.RandomState(0)
    X = rng.rand(20, 1) * 10
    Y = np.sin(X) + 0.9 * np.cos(X * 1.6) + rng.randn(*X.shape) * 0.8
    Y = np.tile(Y, 2)  # two identical columns
    self.Xtest = rng.rand(10, 1) * 10

    # Small factories so each model gets its own fresh objects.
    def rbf():
        return gpflow.kernels.RBF(1)

    def const_mean():
        return gpflow.mean_functions.Constant()

    def gauss_lik():
        return gpflow.likelihoods.Gaussian()

    m1 = gpflow.models.GPR(X, Y, kern=rbf(), mean_function=const_mean())
    m2 = gpflow.models.VGP(X, Y, rbf(), likelihood=gauss_lik(),
                           mean_function=const_mean())
    m3 = gpflow.models.SVGP(X, Y, rbf(), likelihood=gauss_lik(),
                            Z=X.copy(), q_diag=False,
                            mean_function=const_mean())
    m3.feature.trainable = False
    m4 = gpflow.models.SVGP(X, Y, rbf(), likelihood=gauss_lik(),
                            Z=X.copy(), q_diag=False, whiten=True,
                            mean_function=const_mean())
    m4.feature.trainable = False
    m5 = gpflow.models.SGPR(X, Y, rbf(), Z=X.copy(),
                            mean_function=const_mean())
def setup_sgpr():
    """Return an SGPR model on random 3-d data with 100 inducing points."""
    inputs = np.random.randn(1000, 3)
    targets = np.random.randn(1000, 3)
    inducing = np.random.randn(100, 3)
    return gpflow.models.SGPR(inputs, targets, Z=inducing,
                              kern=gpflow.kernels.RBF(3))
def test_scalar(self):
    """A scalar lengthscale must match the equivalent ARD lengthscale vector."""
    with self.test_context():
        iso_kernel = gpflow.kernels.RBF(3, lengthscales=2.3)
        ard_kernel = gpflow.kernels.RBF(3, lengthscales=np.ones(3) * 2.3, ARD=True)
        ls_iso = iso_kernel.lengthscales.read_value()
        ls_ard = ard_kernel.lengthscales.read_value()
        self.assertTrue(np.all(ls_iso == ls_ard))
def prepare(self):
    """Set up two-task toy data plus baseline models for a coregionalization test.

    Builds two independent VGPs (one per task, lengthscales fixed) and the
    switched likelihood for the coregionalized model. (Visible portion ends
    after the switched likelihood is created.)
    """
    rng = np.random.RandomState(0)
    X = [rng.rand(10, 2) * 10, rng.rand(20, 2) * 10]
    Y = [np.sin(inputs) + 0.9 * np.cos(inputs * 1.6) + rng.randn(*inputs.shape) * 0.8
         for inputs in X]
    label = [np.zeros((10, 1)), np.ones((20, 1))]
    perm = [*range(30)]
    rng.shuffle(perm)
    Xtest = rng.rand(10, 2) * 10
    # Stack both tasks; the trailing column carries the task label.
    X_augumented = np.hstack([np.concatenate(X), np.concatenate(label)])
    Y_augumented = np.hstack([np.concatenate(Y), np.concatenate(label)])

    # 1. Two independent VGPs for two sets of data
    k0 = gpflow.kernels.RBF(2)
    k0.lengthscales.trainable = False
    vgp0 = gpflow.models.VGP(X[0], Y[0], kern=k0,
                             mean_function=gpflow.mean_functions.Constant(),
                             likelihood=gpflow.likelihoods.Gaussian())
    k1 = gpflow.kernels.RBF(2)
    k1.lengthscales.trainable = False
    vgp1 = gpflow.models.VGP(X[1], Y[1], kern=k1,
                             mean_function=gpflow.mean_functions.Constant(),
                             likelihood=gpflow.likelihoods.Gaussian())

    # 2. Coregionalized GPR: one Gaussian likelihood per output.
    lik = gpflow.likelihoods.SwitchedLikelihood(
        [gpflow.likelihoods.Gaussian(), gpflow.likelihoods.Gaussian()])
# NOTE(review): script fragment — `x_train`, `y_train`, `x_dim` and `n_train`
# are not defined in the visible scope; confirm against the full script.
# (Disabled diagnostic plots of the training data, one panel per velocity
# component; left in place for manual debugging.)
# plt.ylabel('vx_out')
# plt.figure()
# plt.plot(x_train[:,4],y_train[:,4],'kx',mew=2)
# plt.xlabel('vy_in')
# plt.ylabel('vy_out')
# plt.figure()
# plt.plot(x_train[:,5],y_train[:,5],'kx',mew=2)
# plt.xlabel('vz_in')
# plt.ylabel('vz_out')
# plt.draw()
# Build model
print('Building model...')
# k1: 1-d kernel reused for each per-dimension GPR below;
# k2: full-dimensional kernel for the joint model.
k1 = gp.kernels.RBF(input_dim=1,variance=1,lengthscales=1)
k2 = gp.kernels.RBF(input_dim=x_dim,variance=1,lengthscales=1)
# meanf = gp.mean_functions.Linear(1,0)
meanf = gp.mean_functions.Zero()
# likelihood = gp.likelihoods.Gaussian()
gp_models = []
# Joint GPR over all input dimensions.
m_full = gp.gpr.GPR(x_train,y_train,kern=k2,mean_function=meanf)
# to_train = [3]
to_train = [0,1,2,3,4,5]
# One independent 1-d GPR per selected output dimension.
# NOTE(review): every per-dimension model shares the single kernel object
# `k1`, so their hyperparameters are tied — confirm this is intended rather
# than a fresh kernel per model.
for i in to_train:
    x = x_train[:,i].reshape((n_train,1))
    y = y_train[:,i].reshape((n_train,1))
    m = gp.gpr.GPR(x,y,kern=k1,mean_function=meanf)
    gp_models.append(m)