import netket as nk


def _setup_vmc():
    # 1D chain of 8 spin-1/2 sites
    g = nk.graph.Hypercube(length=8, n_dim=1)
    hi = nk.hilbert.Spin(s=0.5, graph=g)
    # RBM ansatz with hidden-unit density alpha=1
    ma = nk.machine.RbmSpin(hilbert=hi, alpha=1)
    ma.init_random_parameters(seed=SEED, sigma=0.01)  # SEED is assumed to be a module-level constant
    # Transverse-field Ising Hamiltonian and local Metropolis sampler
    ha = nk.operator.Ising(hi, h=1.0)
    sa = nk.sampler.MetropolisLocal(machine=ma)
    sa.seed(SEED)
    op = nk.optimizer.Sgd(learning_rate=0.1)
    vmc = nk.variational.Vmc(
        hamiltonian=ha, sampler=sa, optimizer=op, n_samples=500, diag_shift=0.01
    )
    # Add custom observable: sum of sigma^x over all 8 sites
    X = [[0, 1], [1, 0]]
    sx = nk.operator.LocalOperator(hi, [X] * 8, [[i] for i in range(8)])
    vmc.add_observable(sx, "SigmaX")
    return ma, vmc
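# Minimal usage sketch (an addition, not part of the original file; the SEED
# value and output prefix below are hypothetical, the original module defines
# its own): build the driver and run a short optimization that also records
# the "SigmaX" observable added above.
SEED = 1234
ma, vmc = _setup_vmc()
vmc.run(output_prefix="vmc_fixture_test", n_iter=50)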
import netket as nk
import numpy as np
import pytest
from pytest import approx
samplers = {}
# TESTS FOR SPIN HILBERT
# Constructing a 1d lattice
g = nk.graph.Hypercube(length=6, n_dim=1)
# Hilbert space of spins from given graph
hi = nk.hilbert.Spin(s=0.5, graph=g)
ma = nk.machine.RbmSpin(hilbert=hi, alpha=1)
ma.init_random_parameters(seed=1234, sigma=0.2)
sa = nk.sampler.MetropolisLocal(machine=ma)
samplers["MetropolisLocal RbmSpin"] = sa
sa = nk.sampler.MetropolisLocalPt(machine=ma, n_replicas=4)
samplers["MetropolisLocalPt RbmSpin"] = sa
ha = nk.operator.Ising(hilbert=hi, h=1.0)
sa = nk.sampler.MetropolisHamiltonian(machine=ma, hamiltonian=ha)
samplers["MetropolisHamiltonian RbmSpin"] = sa
# Test with uniform probability
maz = nk.machine.RbmSpin(hilbert=hi, alpha=1)
maz.init_random_parameters(seed=1234, sigma=0)
sa = nk.sampler.MetropolisLocal(machine=maz)
samplers["MetropolisLocal RbmSpin ZeroPars"] = sa
mas = nk.machine.RbmSpinSymm(hilbert=hi, alpha=1)
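# A hedged test sketch (an addition, not taken from the original file): iterate
# over the samplers collected above and check that every sampled configuration
# contains only valid local spin values; this assumes the sampler.samples()
# interface used later in this file.
@pytest.mark.parametrize("name,sampler", list(samplers.items()))
def test_sampled_states_are_valid(name, sampler):
    for sample in sampler.samples(20):
        assert np.all(np.isin(sample, hi.local_states))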
# Hamiltonian built by hand from local operators:
# sum_i sigma^x_i + sum_i sigma^z_i sigma^z_{i+1} on the periodic 6-site chain
L = 6  # number of sites, matching the lattice defined above
sx = [[0, 1], [1, 0]]
sz = [[1, 0], [0, -1]]
ha = nk.operator.LocalOperator(hi)
for i in range(L):
    ha += nk.operator.LocalOperator(hi, sx, [i])
    ha += nk.operator.LocalOperator(hi, np.kron(sz, sz), [i, (i + 1) % L])
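# Optional sanity check (an addition, not in the original test): assuming
# NetKet 2.x's nk.exact.lanczos_ed is available, the exact ground-state energy
# of this small 6-site operator can be computed and compared against later
# variational estimates.
exact_result = nk.exact.lanczos_ed(ha, first_n=1, compute_eigenvectors=False)
print("exact ground-state energy:", exact_result.eigenvalues[0])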
# RBM Spin Machine
ma = nk.machine.RbmSpinPhase(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma)
# Optimizer
op = nk.optimizer.AdaDelta()
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=300,
    diag_shift=0.1,
    use_iterative=True,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=3000)
# 1D Lattice
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)
# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=0.5, graph=g)
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)
# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma, n_chains=8)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.1)
# Stochastic reconfiguration
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    method="Sr",
    diag_shift=0.1,
)
gs.run(output_prefix="test", n_iter=300)
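# A hedged sketch (an addition, not part of the original): with NetKet 2.x
# JSON logging, run(output_prefix="test", ...) is expected to write a
# "test.log" file whose "Output" entries hold per-iteration statistics.
import json

with open("test.log") as f:
    log = json.load(f)
energy_history = [step["Energy"]["Mean"] for step in log["Output"]]
print("last VMC energy estimate:", energy_history[-1])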
# 1D Periodic Lattice
g = nk.graph.Hypercube(length=12, n_dim=1, pbc=True)
# Boson Hilbert Space
hi = nk.hilbert.Boson(graph=g, n_max=3, n_bosons=12)
# Bose Hubbard Hamiltonian
ha = nk.operator.BoseHubbard(U=4.0, hilbert=hi)
# Jastrow Machine with Symmetry
ma = nk.machine.JastrowSymm(hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Sampler
sa = nk.sampler.MetropolisHamiltonian(machine=ma, hamiltonian=ha)
# Stochastic gradient descent optimization
op = nk.optimizer.Sgd(learning_rate=0.1)
# Variational Monte Carlo
vmc = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=10000,
    method="Sr",
    diag_shift=5e-3,
    use_iterative=False,
)
vmc.run(output_prefix="test", n_iter=4000)
# 1D Periodic Lattice
g = nk.graph.Hypercube(length=12, n_dim=1, pbc=True)
# Boson Hilbert Space
hi = nk.hilbert.Boson(graph=g, n_max=3, n_bosons=12)
# Bose Hubbard Hamiltonian
ha = nk.operator.BoseHubbard(U=4.0, hilbert=hi)
# Symmetric RBM Machine
ma = nk.machine.RbmSpinSymm(alpha=4, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Sampler
sa = nk.sampler.MetropolisHamiltonian(machine=ma, hamiltonian=ha)
# Stochastic gradient descent optimization
op = nk.optimizer.AdaMax()
# Variational Monte Carlo
vmc = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    optimizer=op,
    n_samples=1000,
    diag_shift=5e-3,
    use_iterative=False,
    method="Sr",
)
vmc.run(output_prefix="test", n_iter=4000)
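# The next snippet assumes `edge_colors`, `bond_operator`, and `bond_color`
# are defined beforehand. A hypothetical illustration (an assumption, not
# taken from the original file), in the spirit of a J1-J2-style chain, could
# look like this:
import numpy as np

J = [1.0, 0.4]  # hypothetical nearest- and next-nearest-neighbour couplings
L = 14
edge_colors = []
for i in range(L):
    edge_colors.append([i, (i + 1) % L, 1])  # color 1: nearest neighbours
    edge_colors.append([i, (i + 2) % L, 2])  # color 2: next-nearest neighbours

sigmaz = [[1, 0], [0, -1]]
mszsz = np.kron(sigmaz, sigmaz)
exchange = np.asarray([[0, 0, 0, 0], [0, 0, 2, 0], [0, 2, 0, 0], [0, 0, 0, 0]])
bond_operator = [
    (J[0] * mszsz).tolist(),
    (J[1] * mszsz).tolist(),
    (-J[0] * exchange).tolist(),
    (J[1] * exchange).tolist(),
]
bond_color = [1, 2, 1, 2]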
# Custom Graph
g = nk.graph.CustomGraph(edge_colors)
# Spin based Hilbert Space
hi = nk.hilbert.Spin(s=0.5, total_sz=0.0, graph=g)
# Custom Hamiltonian operator
op = nk.operator.GraphOperator(hi, bondops=bond_operator, bondops_colors=bond_color)
# Restricted Boltzmann Machine
ma = nk.machine.RbmSpin(hi, alpha=1)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Sampler
sa = nk.sampler.MetropolisHamiltonianPt(machine=ma, hamiltonian=op, n_replicas=16)
# Optimizer
opt = nk.optimizer.Sgd(learning_rate=0.01)
# Variational Monte Carlo
gs = nk.variational.Vmc(
    hamiltonian=op,
    sampler=sa,
    optimizer=opt,
    n_samples=1000,
    use_iterative=True,
    method="Sr",
)
gs.run(output_prefix="test", n_iter=10000)
# Feed-forward network: two convolution + ln(cosh) blocks on the 1D chain
layers = (
    nk.layer.ConvolutionalHypercube(
        length=L, n_dim=1, input_channels=1, output_channels=4, kernel_length=4
    ),
    nk.layer.Lncosh(input_size=4 * L),
    nk.layer.ConvolutionalHypercube(
        length=4 * L, n_dim=1, input_channels=1, output_channels=2, kernel_length=4
    ),
    nk.layer.Lncosh(input_size=4 * 2 * L),
)
# FFNN Machine
ma = nk.machine.FFNN(hi, layers)
ma.init_random_parameters(seed=1234, sigma=0.1)
# Sampler
sa = nk.sampler.MetropolisHamiltonian(machine=ma, hamiltonian=ha)
# Optimizer
op = nk.optimizer.Sgd(learning_rate=0.01)
# Variational Monte Carlo
gs = nk.variational.Vmc(
    hamiltonian=ha, sampler=sa, optimizer=op, n_samples=1000, diag_shift=0.01
)
gs.run(output_prefix="ffnn_test", n_iter=300, save_params_every=10)
# 1D Lattice
g = nk.graph.Hypercube(length=20, n_dim=1, pbc=True)
# Hilbert space of spins on the graph
hi = nk.hilbert.Spin(s=0.5, graph=g)
# Ising spin hamiltonian
ha = nk.operator.Ising(h=1.0, hilbert=hi)
# RBM Spin Machine
ma = nk.machine.RbmSpin(alpha=1, hilbert=hi)
ma.init_random_parameters(seed=1234, sigma=0.01)
# Metropolis Local Sampling
sa = nk.sampler.MetropolisLocal(machine=ma, n_chains=8)
n_samples = 1000
samples = np.zeros((n_samples, sa.sample_shape[0], sa.sample_shape[1]))
for i, sample in enumerate(sa.samples(n_samples)):
    samples[i] = sample
loc = np.empty(samples.shape[0:2], dtype=np.complex128)
def compute_locals(n_times):
    # repeatedly evaluate the local estimators <x|H|psi>/<x|psi> for every
    # stored batch of samples (`local_values` is assumed imported from netket.operator)
    for k in range(n_times):
        for i, sample in enumerate(samples):
            local_values(ha, ma, sample, out=loc[i])
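# A small usage sketch (an addition): time a few passes of the local-value
# evaluation defined above.
import time

t0 = time.time()
compute_locals(10)
print("10 evaluations took", time.time() - t0, "seconds")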
# Benchmark-script snippet: `args` (parsed command-line options), `hi`, and
# `ha` are assumed to be defined by the surrounding file.
middle_layer = (
    nk.layer.ConvolutionalHypercube(
        length=args.input_size,
        n_dim=1,
        input_channels=args.width,
        output_channels=args.width,
        kernel_length=args.kernel_size,
    ),
    nk.layer.Lncosh(input_size=args.width * args.input_size),
)
middle_layers = middle_layer * (args.depth - 1)
first_layer = (
    nk.layer.ConvolutionalHypercube(
        length=args.input_size,
        n_dim=1,
        input_channels=1,
        output_channels=args.width,
        kernel_length=args.kernel_size,
    ),
    nk.layer.Lncosh(input_size=args.width * args.input_size),
)
ma = nk.machine.FFNN(hi, first_layer + middle_layers)
ma.init_random_parameters(seed=1234, sigma=0.1)
sa = nk.sampler.MetropolisHamiltonian(machine=ma, hamiltonian=ha)
op = nk.optimizer.Sgd(learning_rate=args.learning_rate)
method = 'Sr' if args.use_stochastic_reconfiguration else 'Gd'
gs = nk.variational.Vmc(
    hamiltonian=ha,
    sampler=sa,
    method=method,
    optimizer=op,
    n_samples=args.batch_size,
    use_iterative=args.use_iterative,
    use_cholesky=args.use_cholesky,
    diag_shift=10.0,
)
# Short warm-up run, then time the full optimization (`import time` and the
# enclosing benchmark function are assumed from the surrounding script).
gs.run(output_prefix="ffnn_test", n_iter=5, save_params_every=5)
start_time = time.time()
gs.run(output_prefix="ffnn_test", n_iter=args.num_of_iterations, save_params_every=args.num_of_iterations)
end_time = time.time()
return end_time - start_time
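# A hypothetical command-line setup (an assumption, inferred only from the
# attribute names used on `args` above; the defaults are illustrative).
import argparse

parser = argparse.ArgumentParser(description="FFNN VMC benchmark")
parser.add_argument("--input_size", type=int, default=20)
parser.add_argument("--width", type=int, default=4)
parser.add_argument("--kernel_size", type=int, default=4)
parser.add_argument("--depth", type=int, default=2)
parser.add_argument("--learning_rate", type=float, default=0.01)
parser.add_argument("--batch_size", type=int, default=1000)
parser.add_argument("--num_of_iterations", type=int, default=100)
parser.add_argument("--use_stochastic_reconfiguration", action="store_true")
parser.add_argument("--use_iterative", action="store_true")
parser.add_argument("--use_cholesky", action="store_true")
args = parser.parse_args()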