import numpy as np
import netket as nk
from netket import variational as vmc  # assumed alias: the test below calls vmc.* helpers
from pytest import approx

SEED = 3141592  # assumed value; not defined in this fragment


def _setup_vmc():
    g = nk.graph.Hypercube(length=8, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, graph=g)
    ma = nk.machine.RbmSpin(hilbert=hi, alpha=1)
    ma.init_random_parameters(seed=SEED, sigma=0.01)
    ha = nk.operator.Ising(hi, h=1.0)
    sa = nk.sampler.MetropolisLocal(machine=ma)
    sa.seed(SEED)
    op = nk.optimizer.Sgd(learning_rate=0.1)
    driver = nk.variational.Vmc(
        hamiltonian=ha, sampler=sa, optimizer=op, n_samples=500, diag_shift=0.01
    )
    # Add custom observable: total sigma^x
    X = [[0, 1], [1, 0]]
    sx = nk.operator.LocalOperator(hi, [X] * 8, [[i] for i in range(8)])
    driver.add_observable(sx, "SigmaX")
    # Return the operators, machine, sampler, and driver unpacked by the test below
    return ha, sx, ma, sa, driver


def test_vmc_functions():
    ha, sx, ma, sampler, driver = _setup_vmc()
    driver.advance(200)

    # Dense wavefunction and the corresponding |psi|^2 distribution
    state = ma.to_array()
    exact_dist = np.abs(state) ** 2

    for op, name, tol in (ha, "ha", 1e-6), (sx, "sx", 1e-2):
        print("Testing expectation of op={}".format(name))

        # Exact expectation value from the local values on all basis states
        exact_locs = [vmc.local_value(op, ma, v) for v in ma.hilbert.states()]
        exact_ex = np.sum(exact_dist * exact_locs).real

        # Monte Carlo estimate from freshly drawn samples
        data = vmc.compute_samples(sampler, nsamples=10000, ndiscard=1000)
        ex, lv = vmc.expectation(data, ma, op, return_locvals=True)
        assert ex["Mean"] == approx(np.mean(lv).real, rel=tol)
        assert ex["Mean"] == approx(exact_ex, rel=tol)

    # Variance and gradient of the Hamiltonian estimated from the samples
    var = vmc.variance(data, ma, ha)
    assert var["Mean"] == approx(0.0, abs=1e-7)

    grad = vmc.gradient(data, ma, ha)
    assert grad.shape == (ma.n_par,)
    assert np.mean(np.abs(grad) ** 2) == approx(0.0, abs=1e-9)

    data_without_logderivs = vmc.compute_samples(
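

# ----------------------------------------------------------------------
# Example: transverse-field Ising ground state with a Jastrow machine and
# stochastic reconfiguration. The graph `g` and Hilbert space `hi` are not
# defined in the original snippet; a 1D chain of assumed size is used here.
# ----------------------------------------------------------------------
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g)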
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)
# Jastrow Machine
ma = nk.machine.Jastrow(hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=0.1,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=300)
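

# ----------------------------------------------------------------------
# Example: VMC with a Hamiltonian assembled from bond operators via
# GraphOperator. The graph, Hilbert space, `bond_operator`, and
# `bond_color` are not part of the original snippet; the sketch below
# assumes a Heisenberg-like bond (sigma^z sigma^z plus an exchange term)
# attached to nearest-neighbour edges carrying color 1.
# ----------------------------------------------------------------------
L = 20
edges = [[i, (i + 1) % L, 1] for i in range(L)]  # [site_i, site_j, color]
g = nk.graph.CustomGraph(edges)
hi = nk.hilbert.Spin(s=0.5, graph=g)
sz = [[1, 0], [0, -1]]
mszsz = np.kron(sz, sz)
# Off-diagonal exchange term so the Hamiltonian-based sampler has moves to propose
exchange = np.asarray(
    [[0, 0, 0, 0],
     [0, 0, 2, 0],
     [0, 2, 0, 0],
     [0, 0, 0, 0]]
)
bond_operator = [mszsz.tolist(), exchange.tolist()]
bond_color = [1, 1]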
# Custom Hamiltonian operator
op = nk.operator.GraphOperator(hi, bondops=bond_operator, bondops_colors=bond_color)
# Restricted Boltzmann Machine
ma = nk.machine.RbmSpin(hi, alpha=1)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Sampler
sa = nk.sampler.MetropolisHamiltonianPt(machine=ma, hamiltonian=op, n_replicas=16)
# Optimizer
opt = nk.optimizer.Sgd(learning_rate=0.01)
# Variational Monte Carlo
gs = nk.variational.Vmc(
    hamiltonian=op,
    sampler=sa,
    optimizer=opt,
    n_samples=1000,
    use_iterative=True,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=10000)
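

# ----------------------------------------------------------------------
# Example: transverse-field Ising ground state with an RBM and stochastic
# reconfiguration. As above, the chain graph and Hilbert space are an
# assumed setup, not part of the original snippet.
# ----------------------------------------------------------------------
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g)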
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)
# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=0.1,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=500)
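

# ----------------------------------------------------------------------
# Example: Hamiltonian built by hand from one- and two-site local
# operators, optimized with a phase-split RBM. The chain length L, the
# lattice, the Hilbert space, and the Pauli matrices sx, sz are assumed
# here; the original snippet uses them without defining them.
# ----------------------------------------------------------------------
L = 20
g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g)
sx = [[0, 1], [1, 0]]
sz = [[1, 0], [0, -1]]
# Transverse-field Ising Hamiltonian, assembled term by term below
ha = nk.operator.LocalOperator(hi)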
for i in range(L):
    ha += nk.operator.LocalOperator(hi, sx, [i])
    ha += nk.operator.LocalOperator(hi, np.kron(sz, sz), [i, (i + 1) % L])
# RBM Spin Machine
ma = nk.machine.RbmSpinPhase(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)
# Optimizer
op = nk.optimizer.AdaDelta()
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=300,
    diag_shift=0.1,
    use_iterative=True,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=3000)
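

# ----------------------------------------------------------------------
# Example: 2D transverse-field Ising model near its critical field
# (h is approximately 3 on the square lattice). The lattice size and
# Hilbert space below are an assumed setup.
# ----------------------------------------------------------------------
g = nk.graph.Hypercube(length=5, n_dim=2, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g)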
# Ising spin hamiltonian at the critical point
ha = nk.operator.Ising(h=3.0, hilbert=hi)
# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=0.1,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=1000)
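

# ----------------------------------------------------------------------
# Example: antiferromagnetic Heisenberg chain with a symmetric RBM.
# Assumed setup: a periodic chain and a spin Hilbert space restricted to
# the zero-magnetization sector, which the exchange sampler preserves.
# ----------------------------------------------------------------------
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g, total_sz=0)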
ha = nk.operator.Heisenberg(hilbert=hi)
# Symmetric RBM Spin Machine
ma = nk.machine.RbmSpinSymm(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Exchange Sampling
# Notice that this sampler exchanges two neighboring sites
# and thus preserves the total magnetization
sa = nk.sampler.MetropolisExchange(machine=ma, graph=g)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.05)
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=0.1,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=300)
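

# ----------------------------------------------------------------------
# Example: another transverse-field Ising run with an RBM and stochastic
# reconfiguration; the chain setup is assumed, as in the examples above.
# ----------------------------------------------------------------------
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g)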
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)
# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=0.1,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=300)
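

# ----------------------------------------------------------------------
# Example: Hamiltonian assembled from explicit (matrix, sites) pairs.
# `mats` and `sites` are not defined in the original snippet; the lists
# below sketch a transverse-field Ising model with one sigma^x term per
# site and one sigma^z sigma^z term per nearest-neighbour bond (assumed).
# ----------------------------------------------------------------------
L = 20
g = nk.graph.Hypercube(length=L, n_dim=1, pbc=True)
hi = nk.hilbert.Spin(s=0.5, graph=g)
sx = [[0, 1], [1, 0]]
sz = [[1, 0], [0, -1]]
mats = []
sites = []
for i in range(L):
    mats.append(sx)
    sites.append([i])
    mats.append(np.kron(sz, sz).tolist())
    sites.append([i, (i + 1) % L])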
# Start from an empty operator and add one local term per (matrix, sites) pair
op = nk.operator.LocalOperator(hi)
for mat, site in zip(mats, sites):
    op += nk.operator.LocalOperator(hi, mat, site)
# Restricted Boltzmann Machine
ma = nk.machine.RbmSpin(hi, alpha=1)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Sampler
sa = nk.sampler.MetropolisHamiltonianPt(machine=ma, hamiltonian=op, n_replicas=16)
# Optimizer
opt = nk.optimizer.Sgd(learning_rate=0.01)
# Variational Monte Carlo
gs = nk.variational.Vmc(
    hamiltonian=op,
    sampler=sa,
    optimizer=opt,
    n_samples=1000,
    use_iterative=True,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=10000)