def test_forward_no_collapse(self):
for dtype in (torch.float, torch.double):
# no resample
sampler = SobolQMCNormalSampler(
num_samples=4, seed=1234, collapse_batch_dims=False
)
self.assertFalse(sampler.resample)
self.assertEqual(sampler.seed, 1234)
self.assertFalse(sampler.collapse_batch_dims)
# check samples non-batched
posterior = _get_test_posterior(device=self.device, dtype=dtype)
samples = sampler(posterior)
self.assertEqual(samples.shape, torch.Size([4, 2, 1]))
self.assertEqual(sampler.seed, 1235)
# ensure samples are the same
samples2 = sampler(posterior)
self.assertTrue(torch.allclose(samples, samples2))
self.assertEqual(sampler.seed, 1235)
# ensure this works with a differently shaped posterior
posterior_batched = _get_test_posterior_batched(
device=self.device, dtype=dtype
)
samples_batched = sampler(posterior_batched)
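# For reference, a minimal standalone sketch of the sampler API exercised above.
# It assumes the same BoTorch version as these tests (sampler constructed with
# `num_samples`/`seed` and imported from `botorch.sampling.samplers`; newer
# releases use `sample_shape` instead). The toy model and data are placeholders.
import torch
from botorch.models import SingleTaskGP
from botorch.sampling.samplers import SobolQMCNormalSampler

train_X = torch.rand(10, 2, dtype=torch.double)
train_Y = train_X.sum(dim=-1, keepdim=True)
model = SingleTaskGP(train_X, train_Y)

qmc_sampler = SobolQMCNormalSampler(num_samples=4, seed=1234)
posterior = model.posterior(torch.rand(2, 2, dtype=torch.double))
samples = qmc_sampler(posterior)  # sample_shape x q x m, i.e. 4 x 2 x 1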
# test batch mode, qmc, no resample
sampler = SobolQMCNormalSampler(num_samples=2)
acqf = qExpectedImprovement(model=mm, best_f=0, sampler=sampler)
res = acqf(X)
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertTrue(torch.equal(acqf.sampler.base_samples, bs))
# test batch mode, qmc, resample
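# (with resample=True the sampler draws fresh base samples on every forward
# pass, so base_samples should differ between consecutive evaluations)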
sampler = SobolQMCNormalSampler(num_samples=2, resample=True)
acqf = qExpectedImprovement(model=mm, best_f=0, sampler=sampler)
res = acqf(X) # 1-dim batch
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertFalse(torch.equal(acqf.sampler.base_samples, bs))
res = acqf(X.expand(2, 1, 1)) # 2-dim batch
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
# the base samples should have the batch dim collapsed
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X.expand(2, 1, 1))
self.assertFalse(torch.equal(acqf.sampler.base_samples, bs))
# test batch mode, qmc, no resample, 2-dim batch
sampler = SobolQMCNormalSampler(num_samples=2)
acqf = qExpectedImprovement(model=mm, best_f=0, sampler=sampler)
res = acqf(X.expand(2, 1, 1)) # 2-dim batch
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
# the base samples should have the batch dim collapsed
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X.expand(2, 1, 1))
self.assertTrue(torch.equal(acqf.sampler.base_samples, bs))
# test batch mode, qmc, no resample
sampler = SobolQMCNormalSampler(num_samples=2)
acqf = qSimpleRegret(model=mm, sampler=sampler)
res = acqf(X)
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertTrue(torch.equal(acqf.sampler.base_samples, bs))
# test batch mode, qmc, resample
sampler = SobolQMCNormalSampler(num_samples=2, resample=True)
acqf = qSimpleRegret(model=mm, sampler=sampler)
res = acqf(X) # 1-dim batch
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
new_y = f(unnormalize(new_x, bounds=BOUNDS))
# update training points
train_x = torch.cat((train_x, new_x))
train_y = torch.cat((train_y, new_y))
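# random-search baseline: evaluate a uniformly sampled candidate and keep a running best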
random_candidate = torch.rand(1, dtype=dtype, device=device)
next_random_best = f(unnormalize(random_candidate, bounds=BOUNDS)).max().item()
best_random.append(max(best_random[-1], next_random_best))
# get the new best observed value
best_value = train_y.max().item()
best_rgpe.append(best_value)
# Run Vanilla EI for comparison
vanilla_ei_model = get_fitted_model(vanilla_ei_train_x, vanilla_ei_train_y.view(-1))
vanilla_ei_sampler = SobolQMCNormalSampler(num_samples=MC_SAMPLES)
vanilla_qEI = qExpectedImprovement(
model=vanilla_ei_model,
best_f=vanilla_ei_best_value,
sampler=vanilla_ei_sampler,
)
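# optimize the acquisition function to propose the next candidate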
vanilla_ei_candidate, _ = optimize_acqf(
acq_function=vanilla_qEI,
bounds=torch.tensor([[0.0], [1.0]], dtype=dtype, device=device),
q=Q_BATCH_SIZE,
num_restarts=N_RESTARTS,
raw_samples=N_RESTART_CANDIDATES,
)
# fetch the new values
vanilla_ei_new_x = vanilla_ei_candidate.detach()
vanilla_ei_new_y = f(unnormalize(vanilla_ei_new_x, bounds=BOUNDS))
# test custom construction
qKG = qKnowledgeGradient(
model=mm,
num_fantasies=16,
sampler=sampler,
objective=obj,
X_pending=X_pending,
)
self.assertEqual(qKG.num_fantasies, 16)
self.assertEqual(qKG.sampler, sampler)
self.assertEqual(qKG.sampler.sample_shape, torch.Size([16]))
self.assertEqual(qKG.objective, obj)
self.assertIsInstance(qKG.inner_sampler, SobolQMCNormalSampler)
self.assertEqual(qKG.inner_sampler.sample_shape, torch.Size([128]))
self.assertTrue(torch.equal(qKG.X_pending, X_pending))
self.assertIsNone(qKG.current_value)
self.assertEqual(qKG.get_augmented_q_batch_size(q=3), 16 + 3)
# test assignment of num_fantasies from sampler if not provided
qKG = qKnowledgeGradient(model=mm, num_fantasies=None, sampler=sampler)
self.assertEqual(qKG.sampler.sample_shape, torch.Size([16]))
# test custom construction with inner sampler and current value
inner_sampler = SobolQMCNormalSampler(num_samples=256)
current_value = torch.zeros(1, device=self.device, dtype=dtype)
qKG = qKnowledgeGradient(
model=mm,
num_fantasies=8,
objective=obj,
inner_sampler=inner_sampler,
current_value=current_value,
)
self.assertEqual(qKG.num_fantasies, 8)
self.assertEqual(qKG.sampler.sample_shape, torch.Size([8]))
self.assertEqual(qKG.objective, obj)
self.assertIsInstance(qKG.inner_sampler, SobolQMCNormalSampler)
self.assertEqual(qKG.inner_sampler, inner_sampler)
self.assertIsNone(qKG.X_pending)
self.assertTrue(torch.equal(qKG.current_value, current_value))
self.assertEqual(qKG.get_augmented_q_batch_size(q=3), 8 + 3)
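# qKnowledgeGradient is a one-shot acquisition function: the q-batch that is
# optimized is augmented with one candidate per fantasy, so
# get_augmented_q_batch_size(q) returns q + num_fantasies (e.g. 3 + 8 = 11 above)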
# test batch mode, qmc, no resample
sampler = SobolQMCNormalSampler(num_samples=2)
acqf = qUpperConfidenceBound(model=mm, beta=0.5, sampler=sampler)
res = acqf(X)
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertTrue(torch.equal(acqf.sampler.base_samples, bs))
# test batch mode, qmc, resample
sampler = SobolQMCNormalSampler(num_samples=2, resample=True)
acqf = qUpperConfidenceBound(model=mm, beta=0.5, sampler=sampler)
res = acqf(X) # 1-dim batch
self.assertEqual(res[0].item(), 1.0)
self.assertEqual(res[1].item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 2, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertFalse(torch.equal(acqf.sampler.base_samples, bs))
# basic test, qmc, no resample
sampler = SobolQMCNormalSampler(num_samples=2)
acqf = qUpperConfidenceBound(model=mm, beta=0.5, sampler=sampler)
res = acqf(X)
self.assertEqual(res.item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 1, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertTrue(torch.equal(acqf.sampler.base_samples, bs))
# basic test, qmc, resample
sampler = SobolQMCNormalSampler(num_samples=2, resample=True)
acqf = qUpperConfidenceBound(model=mm, beta=0.5, sampler=sampler)
res = acqf(X)
self.assertEqual(res.item(), 0.0)
self.assertEqual(acqf.sampler.base_samples.shape, torch.Size([2, 1, 1, 1]))
bs = acqf.sampler.base_samples.clone()
acqf(X)
self.assertFalse(torch.equal(acqf.sampler.base_samples, bs))
# basic test for X_pending and warning
acqf.set_X_pending()
self.assertIsNone(acqf.X_pending)
acqf.set_X_pending(None)
self.assertIsNone(acqf.X_pending)
acqf.set_X_pending(X)
self.assertEqual(acqf.X_pending, X)
res = acqf(X)
# test qUCB, qmc (default)
acqf = get_acquisition_function(
acquisition_function_name="qUCB",
model=self.model,
objective=self.objective,
X_observed=self.X_observed,
X_pending=self.X_pending,
mc_samples=self.mc_samples,
seed=self.seed,
beta=0.3,
)
self.assertIs(acqf, mock_acqf.return_value)
mock_acqf.assert_called_once_with(
model=self.model,
beta=0.3,
sampler=mock.ANY,
objective=self.objective,
X_pending=self.X_pending,
)
args, kwargs = mock_acqf.call_args
self.assertEqual(args, ())
sampler = kwargs["sampler"]
self.assertIsInstance(sampler, SobolQMCNormalSampler)
self.assertEqual(sampler.sample_shape, torch.Size([self.mc_samples]))
self.assertEqual(sampler.seed, 1)
self.assertTrue(torch.equal(kwargs["X_pending"], self.X_pending))
# test with different beta, non-qmc
acqf = get_acquisition_function(
acquisition_function_name="qUCB",
model=self.model,
objective=self.objective,
X_observed=self.X_observed,
X_pending=self.X_pending,
mc_samples=self.mc_samples,
qmc=False,
seed=2,
beta=0.2,
)
self.assertEqual(mock_acqf.call_count, 2)
An `n' x d`-dim tensor containing a subset of the points in `X`, where
n' = min(N_nz, ceil(max_frac * n))
with `N_nz` the number of points in `X` that have non-zero (empirical,
under `num_samples` samples) probability of being the best point.
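For example, with `n = 10`, `max_frac = 0.3`, and 4 points that are ever the
sample-wise best, `n' = min(4, ceil(0.3 * 10)) = 3`.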
"""
if X.ndim > 2:
# TODO: support batched inputs (req. dealing with ragged tensors)
raise UnsupportedError(
"Batched inputs `X` are currently unsupported by prune_inferior_points"
)
max_points = math.ceil(max_frac * X.size(-2))
if max_points < 1 or max_points > X.size(-2):
raise ValueError(f"max_frac must take values in (0, 1], is {max_frac}")
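# draw (quasi-)MC samples from the joint posterior over all points in X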
sampler = SobolQMCNormalSampler(num_samples=num_samples)
with torch.no_grad():
posterior = model.posterior(X=X)
samples = sampler(posterior)
if objective is None:
objective = IdentityMCObjective()
obj_vals = objective(samples)
if obj_vals.ndim > 2:
# TODO: support batched inputs (req. dealing with ragged tensors)
raise UnsupportedError(
"Batched models are currently unsupported by prune_inferior_points"
)
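# for each MC sample, find the point with the best objective value, then count
# how often each point is the sample-wise best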
is_best = torch.argmax(obj_vals, dim=-1)
idcs, counts = torch.unique(is_best, return_counts=True)
if len(idcs) > max_points:
counts, order_idcs = torch.sort(counts, descending=True)