assert self._is_setup, "You forgot to set up the sampler!"
loud = not quiet
self._update_free_parameters()
n_dim = len(list(self._free_parameters.keys()))
# Get starting point
p0 = self._get_starting_points(self._n_walkers)
# Deactivate memoization in astromodels, which is useless in this case since we will never use the
# same set of parameters twice
with use_astromodels_memoization(False):
if using_mpi:
with MPIPoolExecutor() as executor:
sampler = zeus.sampler(
logprob_fn=self.get_posterior,
nwalkers=self._n_walkers,
ndim=n_dim,
pool=executor,
)
# if self._seed is not None:
# sampler._random.seed(self._seed)
else:
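For context, a minimal, self-contained sketch of the same pattern outside threeML: running zeus with an MPI pool from mpi4py.futures. The toy log-probability, walker count, and step count are assumptions for illustration, not part of the snippet above.

# hypothetical standalone example; launch with e.g. `mpiexec -n 4 python -m mpi4py.futures script.py`
import numpy as np
import zeus
from mpi4py.futures import MPIPoolExecutor

def log_prob(theta):
    # toy standard-normal log-posterior (illustrative only)
    return -0.5 * np.sum(theta ** 2)

if __name__ == "__main__":
    n_dim, n_walkers = 3, 12
    start = 0.1 * np.random.randn(n_walkers, n_dim)
    with MPIPoolExecutor() as executor:
        sampler = zeus.EnsembleSampler(n_walkers, n_dim, log_prob, pool=executor)
        sampler.run_mcmc(start, 500)
    chain = sampler.get_chain(flat=True)  # shape: (n_walkers * n_steps, n_dim)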
sampler_class = dynesty.NestedSampler
self._update_free_parameters()
n_dim = len(self._free_parameters.keys())
sampling_procedure = sample_without_progress
# dynesty uses a different call signature for
# sampling, so we construct callbacks
loglike, dynesty_prior = self._construct_dynesty_posterior()
with use_astromodels_memoization(False):
if threeML_config['parallel']['use-parallel']:
c = ParallelClient()
view = c[:]
# remap the view's map_sync to the map interface dynesty expects
pool = DynestyPool(view)
dynesty_kwargs['pool'] = pool
# we let the user set up the pool args
# create the class
self._sampler = sampler_class(loglike, dynesty_prior, ndim=n_dim, **dynesty_kwargs)
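The DynestyPool constructed here is threeML plumbing whose definition is not shown; as the "remap the map_sync" comment suggests, it most likely just adapts the ipyparallel view's blocking map_sync to the map interface dynesty expects. A plausible sketch of that adapter (the details are assumed, not taken from threeML):

class DynestyPool(object):
    """Adapter giving an ipyparallel view the pool interface dynesty expects."""

    def __init__(self, view):
        self._view = view
        # dynesty falls back to pool.size when queue_size is not given
        self.size = len(view)

    def map(self, function, iterable):
        # dynesty calls pool.map; delegate to the view's blocking map_sync
        return self._view.map_sync(function, iterable)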
calculate the best or mean fit of the new function or quantity
:return:
"""
# if there are independent variables
if self._independent_variable_range:
variates = []
# scroll through the independent variables
n_iterations = np.prod(self._out_shape)
with progress_bar(n_iterations, title="Propagating errors") as p:
with use_astromodels_memoization(False):
for variables in itertools.product(
*self._independent_variable_range
):
variates.append(self._propagated_function(*variables))
p.increase()
# otherwise just evaluate
else:
variates = self._propagated_function()
# create a variates container
self._propagated_variates = VariatesContainer(
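The loop above, stripped of the threeML plumbing, just evaluates the propagated function on the Cartesian product of the independent-variable ranges, one variate per grid point. A toy sketch (the function and ranges are made-up stand-ins):

import itertools
import numpy as np

def propagated_function(x, y):
    # stand-in for self._propagated_function (illustrative only)
    return x ** 2 + y

ranges = [np.linspace(0, 1, 5), np.linspace(10, 20, 3)]
out_shape = tuple(len(r) for r in ranges)
n_iterations = np.prod(out_shape)  # 15 evaluations in total

variates = [propagated_function(*values) for values in itertools.product(*ranges)]
grid = np.array(variates).reshape(out_shape)  # back onto the (5, 3) grid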
if not os.path.exists(mcmc_chains_out_dir):
os.makedirs(mcmc_chains_out_dir)
# Multinest must be run in parallel via an external method
# see the demo in the examples folder!!
if threeML_config["parallel"]["use-parallel"]:
raise RuntimeError(
"If you want to run multinest in parallell you need to use an ad-hoc method"
)
else:
with use_astromodels_memoization(False):
sampler = pymultinest.run(
loglike, multinest_prior, n_dim, n_dim, **self._kwargs
)
# Use PyMULTINEST analyzer to gather parameter info
process_fit = False
if using_mpi:
# if we are running in parallel and this is not the
# first engine, then we want to wait and let everything finish
if rank != 0:
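For reference, a minimal standalone pymultinest run with the same call shape; the uniform prior and Gaussian likelihood are toy assumptions, and, as the comment above notes, parallel runs are launched externally rather than from within Python.

# hypothetical example; for parallel runs launch externally, e.g. `mpiexec -n 4 python script.py`
import os
import pymultinest

n_dim = 2

def multinest_prior(cube, ndim, nparams):
    # map the unit cube onto the parameter ranges in place (toy uniform [0, 10) priors)
    for i in range(ndim):
        cube[i] = cube[i] * 10.0

def loglike(cube, ndim, nparams):
    # toy Gaussian log-likelihood centered at 5 (illustrative only)
    return -0.5 * sum((cube[i] - 5.0) ** 2 for i in range(ndim))

if not os.path.exists("chains"):
    os.makedirs("chains")

pymultinest.run(loglike, multinest_prior, n_dim, n_dim,
                outputfiles_basename="chains/example-", resume=False, verbose=True)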
assert self._is_setup, "You forgot to set up the sampler!"
loud = not quiet
self._update_free_parameters()
n_dim = len(list(self._free_parameters.keys()))
# Get starting point
p0 = emcee.State(self._get_starting_points(self._n_walkers))
# Deactivate memoization in astromodels, which is useless in this case since we will never use the
# same set of parameters twice
with use_astromodels_memoization(False):
if threeML_config["parallel"]["use-parallel"]:
c = ParallelClient()
view = c[:]
sampler = emcee.EnsembleSampler(
self._n_walkers, n_dim, self.get_posterior, pool=view
)
else:
sampler = emcee.EnsembleSampler(
self._n_walkers, n_dim, self.get_posterior
)
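Outside threeML, the same emcee setup works with a plain multiprocessing pool in place of the ipyparallel view; the toy posterior below is an assumption for illustration.

import numpy as np
import emcee
from multiprocessing import Pool

def log_posterior(theta):
    # toy standard-normal log-posterior (illustrative only)
    return -0.5 * np.sum(theta ** 2)

if __name__ == "__main__":
    n_dim, n_walkers = 3, 16
    p0 = 0.1 * np.random.randn(n_walkers, n_dim)
    with Pool() as pool:
        sampler = emcee.EnsembleSampler(n_walkers, n_dim, log_posterior, pool=pool)
        sampler.run_mcmc(p0, 1000, progress=True)
    samples = sampler.get_chain(discard=200, flat=True)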
# check if we are going to do things in parallel
if threeML_config["parallel"]["use-parallel"]:
c = ParallelClient()
view = c[:]
self._kwargs["pool"] = view
self._kwargs["queue_size"] = len(view)
sampler = NestedSampler(loglike, dynesty_prior, **self._kwargs)
self._sampler_kwargs["print_progress"] = loud
with use_astromodels_memoization(False):
sampler.run_nested(**self._sampler_kwargs)
self._sampler = sampler
results = self._sampler.results
# draw posterior samples
weights = np.exp(results["logwt"] - results["logz"][-1])
SQRTEPS = math.sqrt(float(np.finfo(np.float64).eps))
rstate = np.random
if abs(np.sum(weights) - 1.0) > SQRTEPS: # same tol as in np.random.choice.
raise ValueError("Weights do not sum to 1.")
def sample_nestle(self, quiet=False, progress=True, method='single', **kwargs):
self._update_free_parameters()
n_dim = len(self._free_parameters.keys())
sampling_procedure = sample_without_progress
# nestle uses the same call signature as dynesty for
# sampling, so we construct the same callbacks
loglike, nestle_prior = self._construct_dynesty_posterior()
with use_astromodels_memoization(False):
results = nestle.sample(loglike, nestle_prior, n_dim, method=method, **kwargs)
# re-scale weights to have a maximum of one
nweights = results.weights / np.max(results.weights)
# get the probability of keeping a sample from the weights
keepidx = np.where(np.random.rand(len(nweights)) < nweights)[0]
# get the posterior samples
samples_nestle = results.samples[keepidx, :]
self._raw_samples = samples_nestle
self._log_like_values = results.logl[keepidx]
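The weight thresholding above keeps each nested sample with probability proportional to its normalized weight. If your nestle version provides it, a resample_equal helper achieves the same end without the random keep/drop step (an assumption here, since the helper is not shown in the snippet):

import numpy as np
import nestle

# weights must sum to one before resampling
norm_weights = results.weights / np.sum(results.weights)
samples_equal = nestle.resample_equal(results.samples, norm_weights)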
"""
self._update_free_parameters()
n_dim = len(self._free_parameters.keys())
# Get starting point
p0 = self._get_starting_points(n_walkers)
sampling_procedure = sample_with_progress
# Deactivate memoization in astromodels, which is useless in this case since we will never use the
# same set of parameters twice
with use_astromodels_memoization(False):
if threeML_config['parallel']['use-parallel']:
c = ParallelClient()
view = c[:]
sampler = emcee.EnsembleSampler(n_walkers, n_dim, self.get_posterior, pool=view)
# Sampling with progress in parallel is super-slow, so let's
# use the non-interactive one
sampling_procedure = sample_without_progress
else:
sampler = emcee.EnsembleSampler(n_walkers, n_dim, self.get_posterior)
def sample(self, *args, **kwargs):
self.likelihood_model.test.spectrum.main.shape.reset_tracking()
self.likelihood_model.test.spectrum.main.shape.start_tracking()
with use_astromodels_memoization(False):
try:
super(BayesianAnalysisWrap, self).sample(*args, **kwargs)
finally:
self.likelihood_model.test.spectrum.main.shape.stop_tracking()
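use_astromodels_memoization is astromodels' own context manager; the general pattern it embodies, temporarily overriding a module-level switch and restoring it on exit, can be sketched as follows (the flag and names here are made up, not astromodels internals):

from contextlib import contextmanager

_MEMOIZATION_ENABLED = True  # hypothetical module-level switch

@contextmanager
def use_memoization(enabled):
    # override the flag for the duration of the block, restoring it even on error
    global _MEMOIZATION_ENABLED
    previous = _MEMOIZATION_ENABLED
    _MEMOIZATION_ENABLED = enabled
    try:
        yield
    finally:
        _MEMOIZATION_ENABLED = previous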