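# Examples of using emcee's HDF5 backend (emcee.backends.HDFBackend) to store,
# resume, and re-read MCMC chains, excerpted from several codebases. The
# excerpts assume the usual imports (os, numpy as np, emcee, and, where used,
# pytest, gzip, pickle, functools.partial, multiprocessing as mp, and
# shutil.copyfile) and refer to helper functions defined in their parent modules.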
def test_reload(backend, dtype):
    with backend() as backend1:
        run_sampler(backend1, dtype=dtype)

        # Test the state
        state = backend1.random_state
        np.random.set_state(state)

        # Load the file using a new backend object.
        backend2 = backends.HDFBackend(backend1.filename, backend1.name,
                                       read_only=True)

        with pytest.raises(RuntimeError):
            backend2.reset(32, 3)

        assert state[0] == backend2.random_state[0]
        assert all(np.allclose(a, b)
                   for a, b in zip(state[1:], backend2.random_state[1:]))

        # Check all of the components.
        for k in ["chain", "log_prob", "blobs"]:
            a = backend1.get_value(k)
            b = backend2.get_value(k)
            _custom_allclose(a, b)

        last1 = backend1.get_last_sample()
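
# A minimal, self-contained sketch (not taken from the test above) of the round
# trip that test exercises: run a toy sampler against an HDF5 backend, reopen
# the same file read-only, and check that the stored chain matches. The
# log-probability, filename, and sizes here are made-up placeholders.
import numpy as np
import emcee


def log_prob(theta):
    # standard-normal log density, up to a constant
    return -0.5 * np.sum(theta ** 2)


nwalkers, ndim, nsteps = 32, 3, 100

backend = emcee.backends.HDFBackend("chain.h5")
backend.reset(nwalkers, ndim)

sampler = emcee.EnsembleSampler(nwalkers, ndim, log_prob, backend=backend)
sampler.run_mcmc(np.random.randn(nwalkers, ndim), nsteps)

reader = emcee.backends.HDFBackend("chain.h5", read_only=True)
assert np.allclose(sampler.get_chain(), reader.get_chain())


# The next excerpt sets up an HDF5 backend for a fit and, when continuing an
# old run, restarts the walkers from the last stored chain positions.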
def MCMC_fit(outdir, theta_0, init_err, bounds, params, fitkeys, settings, continue_old_run=False):
    global data
    ndim = len(theta_0)

    #::: set up a backend
    backend = emcee.backends.HDFBackend(os.path.join(outdir, 'save.h5'))

    #::: continue on the backend / reset the backend
    if not continue_old_run:
        backend.reset(settings['nwalkers'], ndim)

    #::: helper fct
    def run_mcmc(sampler):
        #::: set initial walker positions
        if continue_old_run:
            p0 = backend.get_chain()[-1, :, :]
            already_completed_steps = backend.get_chain().shape[0] * settings['thin_by']
        else:
            p0 = theta_0 + init_err * np.random.randn(settings['nwalkers'], ndim)
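
# The next excerpt runs an ensemble sampler that writes to an HDF5 backend,
# reopens the saved file through a second HDFBackend, discards the burn-in
# fraction of the chain, and summarizes each parameter by its median and
# 1-sigma percentile errors.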
# The positional arguments below (n_walkers, n_dim, log_posterior) are
# placeholders: the start of this sampler construction is cut off in the
# excerpt, and only its keyword arguments survive.
sampler = emcee.EnsembleSampler(
    n_walkers, n_dim, log_posterior,
    args=(init_params, init_m, stimes,
          smags, serrs, priorbounds),
    pool=pool,
    backend=backend
)
sampler.run_mcmc(starting_positions, n_mcmc_steps,
                 progress=mcmcprogressbar)

if verbose:
    LOGINFO(
        'ended {:s} MCMC run with {:d} steps, {:d} walkers, '.format(
            fittype, n_mcmc_steps, n_walkers
        ) + '{:d} threads'.format(nworkers)
    )

reader = emcee.backends.HDFBackend(samplesavpath)

n_to_discard = int(burninpercent * n_mcmc_steps)
samples = reader.get_chain(discard=n_to_discard, flat=True)

# Get best-fit parameters and their 1-sigma error bars
fit_statistics = list(
    map(lambda v: (v[1], v[2] - v[1], v[1] - v[0]),
        list(zip(*np.percentile(samples, [15.85, 50, 84.15], axis=0))))
)

medianparams, std_perrs, std_merrs = {}, {}, {}
for ix, k in enumerate(np.sort(list(priorbounds.keys()))):
    medianparams[k] = fit_statistics[ix][0]
    std_perrs[k] = fit_statistics[ix][1]
    std_merrs[k] = fit_statistics[ix][2]
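
# A small sketch (made-up data) of the percentile summary above: for each
# parameter column, the 50th percentile is the median, and the distances to the
# 84.15th and 15.85th percentiles give the +1-sigma and -1-sigma errors.
import numpy as np

toy_samples = np.random.normal(loc=[1.0, 5.0], scale=[0.1, 0.5], size=(10000, 2))
lo, med, hi = np.percentile(toy_samples, [15.85, 50, 84.15], axis=0)
for m, perr, merr in zip(med, hi - med, med - lo):
    print("{:.3f} +{:.3f} -{:.3f}".format(m, perr, merr))


# The next excerpt draws initial walker positions (falling back to uniform and
# gamma-prior draws when perturbing self.eparams fails), attaches an HDF5
# backend that is reset unless the run is being resumed, and builds the sampler
# inside a multiprocessing pool.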
try:
    p0 = np.random.normal(self.eparams, size=(nwalkers, ndim))
except:
    p0 = []
    # p0 is a (nwalkers, ndim) array
    amp = [10.0, 150]
    p0.append(np.random.uniform(0.01, 1.0, nwalkers))
    for i in range(self.m):
        p0 += [np.random.uniform(amp[0], amp[1], nwalkers)]
    for s, r in self.priors:
        # Draw randomly from the gamma priors
        p0 += [np.random.gamma(s, 1. / r, nwalkers)]
    p0 = np.array(p0).T

backend = emcee.backends.HDFBackend(filename)

# If we want to start from scratch, we need to reset the backend
if not resume:
    backend.reset(nwalkers, ndim)

f = partial(_ln_posterior, gparams=self.gparams, PhiPhi=self.PhiPhi, w_hat=self.w_hat, M=self.M, m=self.m,
            priors=self.priors, verbose=False)

if mp.cpu_count() > max_cpus:
    processes = max_cpus
else:
    processes = mp.cpu_count()

with mp.Pool(processes=processes) as pool:
    sampler = emcee.EnsembleSampler(nwalkers, ndim, f, backend=backend, pool=pool)

    # Set up the loop for auto-checking the autocorrelation time
    old_tau = np.inf
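    # A sketch of how such a convergence loop typically continues, following the
    # pattern in the emcee documentation; max_n, the check interval of 100 steps,
    # and the 100*tau / 1% thresholds are assumptions, not part of the excerpt above.
    max_n = 100000  # assumed cap on the number of steps
    for sample in sampler.sample(p0, iterations=max_n, progress=True):
        if sampler.iteration % 100:
            continue  # only check convergence every 100 steps
        tau = sampler.get_autocorr_time(tol=0)
        converged = np.all(tau * 100 < sampler.iteration)
        converged &= np.all(np.abs(old_tau - tau) / tau < 0.01)
        if converged:
            break
        old_tau = tau


# The next excerpt asks whether to overwrite or continue an existing run,
# removes the saved HDF5 file when starting over, and restarts the walkers from
# the last stored chain positions when continuing.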
if overwrite == '1':
    continue_old_run = False
elif overwrite == '2':
    continue_old_run = True
else:
    raise ValueError('User aborted operation.')

#::: continue on the backend / reset the backend
if os.path.exists(os.path.join(config.BASEMENT.outdir, 'mcmc_save.h5')) and not continue_old_run:
    # backend.reset(config.BASEMENT.settings['mcmc_nwalkers'], config.BASEMENT.ndim)
    os.remove(os.path.join(config.BASEMENT.outdir, 'mcmc_save.h5'))

#::: set up a fresh backend
backend = emcee.backends.HDFBackend(os.path.join(config.BASEMENT.outdir, 'mcmc_save.h5'))

#::: helper fct
def run_mcmc(sampler):
    #::: set initial walker positions
    if continue_old_run:
        p0 = backend.get_chain()[-1, :, :]
        already_completed_steps = backend.get_chain().shape[0] * config.BASEMENT.settings['mcmc_thin_by']
    else:
        p0 = config.BASEMENT.theta_0 + config.BASEMENT.init_err * np.random.randn(config.BASEMENT.settings['mcmc_nwalkers'], config.BASEMENT.ndim)
        already_completed_steps = 0

    #::: make sure the initial positions are within the limits
    for i, b in enumerate(config.BASEMENT.bounds):
        if b[0] == 'uniform':
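
# The final excerpt loads previously computed results for post-processing:
# nested-sampling output from a gzipped pickle if present, otherwise an MCMC
# chain read through a read-only HDFBackend (operating on a temporary copy of
# the .h5 file, which is deleted afterwards), otherwise pickled derived-sample
# files.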
try:
    self.external_priors = config.BASEMENT.external_priors
except:
    pass

# try:
if os.path.exists(os.path.join(config.BASEMENT.outdir, 'save_ns.pickle.gz')):
    f = gzip.GzipFile(os.path.join(config.BASEMENT.outdir, 'save_ns.pickle.gz'), 'rb')
    results = pickle.load(f)
    f.close()
    self.posterior_samples = nested_sampling_output.draw_ns_posterior_samples(results)  # all weighted posterior_samples
    self.posterior_params = nested_sampling_output.draw_ns_posterior_samples(results, as_type='dic')  # all weighted posterior_samples
    self.posterior_params_median, self.posterior_params_ll, self.posterior_params_ul = general_output.get_params_from_samples(self.posterior_samples)

elif os.path.exists(os.path.join(config.BASEMENT.outdir, 'mcmc_save.h5')):
    copyfile(os.path.join(config.BASEMENT.outdir, 'mcmc_save.h5'), os.path.join(config.BASEMENT.outdir, 'mcmc_save_tmp.h5'))
    reader = emcee.backends.HDFBackend(os.path.join(config.BASEMENT.outdir, 'mcmc_save_tmp.h5'), read_only=True)
    self.posterior_samples = draw_mcmc_posterior_samples(reader)  # all weighted posterior_samples
    self.posterior_params = draw_mcmc_posterior_samples(reader, as_type='dic')  # all weighted posterior_samples
    self.posterior_samples_at_maximum_likelihood = draw_mcmc_posterior_samples_at_maximum_likelihood(reader)
    self.posterior_params_at_maximum_likelihood = draw_mcmc_posterior_samples_at_maximum_likelihood(reader, as_type='dic')
    self.posterior_params_median, self.posterior_params_ll, self.posterior_params_ul = general_output.get_params_from_samples(self.posterior_samples)
    os.remove(os.path.join(config.BASEMENT.outdir, 'mcmc_save_tmp.h5'))

elif os.path.exists(os.path.join(config.BASEMENT.outdir, 'ns_derived_samples.pickle')):
    self.posterior_derived_params = pickle.load(open(os.path.join(datadir, 'ns_derived_samples.pickle'), 'rb'))

elif os.path.exists(os.path.join(config.BASEMENT.outdir, 'mcmc_derived_samples.pickle')):
    self.posterior_derived_params = pickle.load(open(os.path.join(datadir, 'mcmc_derived_samples.pickle'), 'rb'))