    allValues = list(res[0].values())
else:
    res = self.fit(False, True)
    print(res)
    allValues = [x.value for x in self.freeParameters.values()]
    print(allValues)
    print(self.minusLogLike(allValues))
    pass
ntemps = 20
ndim = len(allValues)
# p0 = [numpy.array(allValues)*numpy.random.uniform(0.9,1.1,ndim) for i in range(nwalkers)]
p0 = numpy.random.uniform(0.9, 1.1, size=(ntemps, nwalkers, ndim)) * numpy.array(allValues)
self.sampler = emcee.PTSampler(ntemps, nwalkers, ndim, lnprob2, lnprior)
# self.sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob)
self.sampler.reset()
if burn > 0:
    # Burn-in: advance the walkers, then discard the stored chain
    for p, lnprob, lnlike in self.sampler.sample(p0, iterations=burn):
        pass
    # r = self.sampler.run_mcmc(p0, burn)
    self.sampler.reset()
else:
    # No burn-in requested: start the production run from the initial positions
    p = p0
    lnprob = lnlike = None
    pass
for p, lnprob, lnlike in self.sampler.sample(p, lnprob0=lnprob, lnlike0=lnlike, iterations=nsamplesPerWalker):
    pass
# r = self.sampler.run_mcmc(p0, nsamplesPerWalker)
print("done")
import numpy as np
from numpy import inf
import emcee
# initpop (the population initializer) comes from the surrounding package; its import is not shown in this excerpt.

def walk(problem, burn=100, steps=400, ntemps=30, maxtemp=None, dtemp=3.0,
         npop=10, nthin=1, init='eps', state=None):
    # Geometric temperature ladder: step by dtemp, or span up to maxtemp when given
    log_dtemp = np.log(dtemp) if maxtemp is None else np.log(maxtemp)/(ntemps-1)
    betas = np.exp(-log_dtemp*np.arange(ntemps))
    #betas = (np.linspace(ntemps, 1, ntemps)/ntemps)**5
    p0 = problem.getp()
    dim = len(p0)
    nwalkers = npop*dim
    bounds = problem.bounds()
    log_prior = lambda p: 0 if ((p >= bounds[0]) & (p <= bounds[1])).all() else -inf
    log_likelihood = lambda p: -problem.nllf(p)
    sampler = emcee.PTSampler(
        ntemps=ntemps, nwalkers=nwalkers, dim=dim,
        logl=log_likelihood, logp=log_prior,
        betas=betas,
    )
    # initial population
    if state is None:
        pop = initpop.generate(problem, init=init, pop=npop*ntemps)
        #lnprob, lnlike = None, None
    else:
        logp, samples = state
        pop = samples[:, :, -1, :]
        #lnprob, lnlike = logp[:,:,-1], logp[:,:,-1]
    p = pop.reshape(ntemps, nwalkers, -1)
    iteration = 0
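# Sketch of the temperature ladder built inside walk() above: successive temperatures
# grow geometrically by a factor dtemp, or, if maxtemp is given, are spread so that the
# hottest chain sits at maxtemp. The betas are the inverse temperatures that PTSampler
# accepts through its betas argument. The function name here is ours, not the library's.
import numpy as np

def temperature_ladder(ntemps, dtemp=3.0, maxtemp=None):
    log_dtemp = np.log(dtemp) if maxtemp is None else np.log(maxtemp) / (ntemps - 1)
    return np.exp(-log_dtemp * np.arange(ntemps))

print(temperature_ladder(4, dtemp=3.0))           # betas 1, 1/3, 1/9, 1/27
print(1.0 / temperature_ladder(4, maxtemp=30.0))  # temperatures 1 ... 30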
def localize_emcee(
        logl, loglargs, logp, logpargs, xmin, xmax,
        nside=-1, chain_dump=None):
    # Set up sampler
    import emcee
    import itertools
    import numpy as np
    from sky_area.sky_area_clustering import Clustered3DKDEPosterior
    ntemps = 20
    nwalkers = 100
    nburnin = 1000
    nthin = 10
    niter = 10000 + nburnin
    ndim = len(xmin)
    sampler = emcee.PTSampler(
        ntemps=ntemps, nwalkers=nwalkers, dim=ndim, logl=logl, logp=logp,
        loglargs=loglargs, logpargs=logpargs)
    # Draw initial state from multivariate uniform distribution
    p0 = np.random.uniform(xmin, xmax, (ntemps, nwalkers, ndim))
    # Collect samples. The .copy() is important because PTSampler.sample()
    # reuses p on every iteration.
    chain = np.vstack([
        p[0, :, :].copy() for p, _, _
        in itertools.islice(
            sampler.sample(p0, iterations=niter, storechain=False),
            nburnin, niter, nthin
        )])
    # Extract polar coordinates. For all likelihoods, the first two parameters
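# The islice call above implements burn-in and thinning in one step: it skips the first
# nburnin states the sampler yields and then keeps every nthin-th one. A toy illustration
# with plain integers standing in for sampler iterations:
import itertools

kept = list(itertools.islice(range(100), 20, 100, 10))
print(kept)  # [20, 30, 40, 50, 60, 70, 80, 90]: 8 of 100 iterations retained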
    Sample with parallel tempering

    :param n_temps:
    :param n_walkers:
    :param burn_in:
    :param n_samples:
    :return: MCMC samples
    """
    free_parameters = self._likelihood_model.free_parameters
    n_dim = len(free_parameters.keys())
    sampler = emcee.PTSampler(n_temps, n_walkers, n_dim, self._log_like, self._log_prior)
    # Get one starting point for each temperature
    p0 = np.empty((n_temps, n_walkers, n_dim))
    for i in range(n_temps):
        p0[i, :, :] = self._get_starting_points(n_walkers)
    print("Running burn-in of %s samples...\n" % burn_in)
    p, lnprob, lnlike = sample_with_progress("Burn-in", p0, sampler, burn_in)
    # Reset sampler
    sampler.reset()
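# sample_with_progress is a helper from the surrounding package and is not shown in this
# excerpt. A purely hypothetical sketch of what such a helper might look like, assuming
# it simply wraps PTSampler.sample() and reports progress; the real implementation may
# differ.
def sample_with_progress(title, p0, sampler, n_samples, **kwargs):
    p, lnprob, lnlike = p0, None, None
    for i, (p, lnprob, lnlike) in enumerate(
            sampler.sample(p0, iterations=n_samples, **kwargs)):
        if (i + 1) % max(1, n_samples // 10) == 0:
            print("%s: %d / %d steps" % (title, i + 1, n_samples))
    return p, lnprob, lnlike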
if num_proc == 0:
    num_proc = 1
ndim = len(self.free_params)
if sampler is None:
    if sampler_type == 'ensemble':
        sampler = emcee.EnsembleSampler(
            nwalkers,
            ndim,
            _ComputeLnProbEval(self),
            threads=num_proc,
            a=sampler_a
        )
    elif sampler_type == 'pt':
        # TODO: Finish this!
        raise NotImplementedError("PTSampler not done yet!")
        sampler = emcee.PTSampler(
            ntemps,
            nwalkers,
            ndim,
            logl,
            logp
        )
    else:
        raise NotImplementedError(
            "Sampler type {:s} not supported!".format(sampler_type)
        )
else:
    sampler.a = sampler_a
if sampler.chain.size == 0:
    theta0 = self.hyperprior.random_draw(size=nwalkers).T
    theta0 = theta0[:, ~self.fixed_params]
else:
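# Sketch of the initial-state shapes the two sampler types in the snippet above expect
# in emcee 2.x: EnsembleSampler takes one position per walker, PTSampler one position
# per (temperature, walker) pair. The numbers here are arbitrary.
import numpy as np

ntemps, nwalkers, ndim = 4, 32, 3
p0_ensemble = np.random.randn(nwalkers, ndim)    # passed to EnsembleSampler.sample/run_mcmc
p0_pt = np.random.randn(ntemps, nwalkers, ndim)  # passed to PTSampler.sample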
def samplePT(self, ntemps, nwalkers, burn_in, nsamples):
    '''
    Sample with parallel tempering
    '''
    self.freeParameters = self.likelihoodModel.getFreeParameters()
    ndim = len(self.freeParameters.keys())
    sampler = emcee.PTSampler(ntemps, nwalkers, ndim, self._logLike, self._logp)
    # Get one starting point for each temperature
    p0 = numpy.empty((ntemps, nwalkers, ndim))
    for i in range(ntemps):
        p0[i, :, :] = self._getStartingPoint(nwalkers)
    print("Running burn-in of %s samples...\n" % burn_in)
    p, lnprob, lnlike = sampleWithProgress(p0, sampler, burn_in)
    # Reset sampler
    sampler.reset()
def emcee_sky_map(
        logl, loglargs, logp, logpargs, xmin, xmax,
        nside=-1, kde=False, chain_dump=None, max_horizon=1.0):
    # Set up sampler
    import emcee
    import itertools
    import numpy as np
    ntemps = 20
    nwalkers = 100
    nburnin = 1000
    nthin = 10
    niter = 10000 + nburnin
    ndim = len(xmin)
    sampler = emcee.PTSampler(
        ntemps=ntemps, nwalkers=nwalkers, dim=ndim, logl=logl, logp=logp,
        loglargs=loglargs, logpargs=logpargs)
    # Draw initial state from multivariate uniform distribution
    p0 = np.random.uniform(xmin, xmax, (ntemps, nwalkers, ndim))
    # Collect samples. The .copy() is important because PTSampler.sample()
    # reuses p on every iteration.
    chain = np.vstack([
        p[0, :, :].copy() for p, _, _
        in itertools.islice(
            sampler.sample(p0, iterations=niter, storechain=False),
            nburnin, niter, nthin
        )])
    # Extract polar coordinates. For all likelihoods, the first two parameters
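# A self-contained toy run of the same PTSampler + islice pattern, with a standard normal
# log-likelihood and a flat prior on [xmin, xmax]. Names mirror the snippet above, but the
# target density and all numbers are made up for illustration and assume emcee 2.x, where
# PTSampler is still available.
import itertools
import numpy as np
import emcee

xmin, xmax = np.array([-5.0, -5.0]), np.array([5.0, 5.0])
ndim, ntemps, nwalkers = len(xmin), 4, 16
nburnin, nthin, niter = 50, 5, 250

def logl(p):
    return -0.5 * np.sum(p ** 2)          # unnormalised standard normal

def logp(p):
    return 0.0 if np.all((p >= xmin) & (p <= xmax)) else -np.inf

sampler = emcee.PTSampler(ntemps=ntemps, nwalkers=nwalkers, dim=ndim,
                          logl=logl, logp=logp)
p0 = np.random.uniform(xmin, xmax, (ntemps, nwalkers, ndim))
chain = np.vstack([
    p[0, :, :].copy() for p, _, _
    in itertools.islice(
        sampler.sample(p0, iterations=niter, storechain=False),
        nburnin, niter, nthin
    )])
print(chain.shape)  # (nwalkers * (niter - nburnin) // nthin, ndim) = (640, 2)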