How to use the exoplanet.PyMC3Sampler function in exoplanet

To help you get started, we’ve selected a few exoplanet examples based on popular ways the library is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github rodluger / starry / exoplanet_doppler.py View on Github external
# Evaluate the relative position of the orbiting body at the times t.
coords = orbit.get_relative_position(t)
x, y = coords[0].eval(), coords[1].eval()
# NOTE(review): sign flip on z presumably matches map.rv's line-of-sight
# convention — confirm against the starry docs.
z = -coords[2].eval()
truths["rv"] = map.rv(xo=x, yo=y, zo=z, ro=truths["r"])

# Add Gaussian observational noise to the noiseless RV model.
noise = truths["rv_err"] * np.random.randn(len(t))
rv = truths["rv"] + noise

# Quick look: noisy data points over the true RV curve.
plt.plot(t, rv, 'k.', alpha=0.3, ms=3)
plt.plot(t, truths["rv"])
plt.show()

# Sampler with a 100-step tuning window and a 200-step finishing stage.
sampler = xo.PyMC3Sampler(window=100, finish=200)

# Theano op wrapping a starry Doppler map with limb-darkening degree udeg.
starry_op = starry.ops.DopplerMapOp(udeg=udeg)

# Priors for the Doppler fit.
# NOTE(review): angle priors appear to be in degrees; units of veq and period
# are not shown in this fragment — confirm against the rest of the script.
with pm.Model() as model:

    inc = pm.Uniform("inc", 0, 180)            # inclination
    obl = pm.Uniform("obl", -180, 180)         # obliquity
    alpha = pm.Uniform("alpha", 0, 1)
    veq = pm.Uniform("veq", 0, 10)             # equatorial velocity
    period = pm.Uniform("period", 1.0, 100.0)  # period
    t0 = pm.Uniform("t0", -1, 1)               # reference epoch
    # Quadratic limb-darkening coefficients under exoplanet's QuadLimbDark prior.
    u = xo.distributions.QuadLimbDark("u", testval=np.array([0.3, 0.2]))
    r = pm.Uniform("r", 0.01, 0.25)            # radius (ratio, presumably)
    b = pm.Uniform("b", 0, 1.25)               # impact parameter

    # We're not fitting for theta
github rodluger / starry / scripts / exoplanet_inference.py View on Github external
# NOTE(review): this fragment is the tail of a `with pm.Model()` block; the
# first line below lost its leading indentation during extraction.
pm.Deterministic("light_curve", light_curve)
    
    # Simulate the dataset we will fit: evaluate the model light curve at the
    # current (test-point) parameter values, then add Gaussian noise.
    flux = xo.eval_in_model(light_curve)
    flux += ferr * np.random.randn(len(flux))
    
    # The likelihood function assuming known Gaussian uncertainty
    pm.Normal("obs", mu=light_curve, sd=ferr, observed=flux)
    
    # Fit for the maximum a posteriori parameters given the simulated
    # dataset
    map_soln = pm.find_MAP(start=model.test_point)


# Fix the RNG so the simulated dataset and sampling are reproducible.
np.random.seed(42)

# Tune in 100-step windows, then take 200 final tuning steps.
sampler = xo.PyMC3Sampler(window=100, finish=200)
with model:
    burnin = sampler.tune(
        tune=2500,
        start=map_soln,
        step_kwargs={"target_accept": 0.9},
    )
    trace = sampler.sample(draws=3000)
github dfm / exoplanet / paper / notebooks / scaling / scaling.py View on Github external
# Optimize (two-stage MAP fit; these lines sit inside an enclosing
# `with model:` block whose header is not shown in this fragment)
    # First optimize only the trend parameters from the model's test point...
    map_soln = pm.find_MAP(start=model.test_point, vars=[trend])
    # ...then refine all parameters starting from that partial solution.
    map_soln = pm.find_MAP(start=map_soln)


def check_convergence(samples):
    """Assess MCMC convergence via the integrated autocorrelation time.

    Parameters
    ----------
    samples : ndarray
        Chain samples in the layout accepted by
        ``emcee.autocorr.integrated_time`` (first two axes are steps and
        chains, presumably).

    Returns
    -------
    converged : bool-like
        True when every parameter has more than the module-level
        ``target_n_eff`` effective samples and the chains are at least 50
        autocorrelation times long.
    n_eff : ndarray
        Effective sample size per parameter (total samples / tau).
    """
    # tol=0 disables emcee's internal chain-length check so short chains
    # still return an estimate rather than raising.
    act = emcee.autocorr.integrated_time(samples, tol=0)
    total = samples.shape[0] * samples.shape[1]
    converged = np.all(act * target_n_eff < total)
    converged &= np.all(len(samples) > 50 * act)
    return converged, total / act


# Run the PyMC3 sampler: a 500-step initial window, doubling 500-step tuning
# windows, and a 500-step finishing stage.
chains = 2
sampler = xo.PyMC3Sampler(start=500, window=500, finish=500)
with model:
    burnin = sampler.tune(
        tune=100000,
        start=map_soln,
        chains=chains,
        cores=1,
        progressbar=False,
    )

# Accumulated wall-clock time spent drawing samples (tuning above excluded).
tottime = 0
trace = None
with model:
    while True:
        strt = time.time()
        # Extend the existing trace by another 2000 draws per chain.
        trace = sampler.sample(
            draws=2000, trace=trace, chains=chains, cores=1, progressbar=False
        )
        tottime += time.time() - strt

        # Per-chain samples of "P" (combine=False keeps chains separate);
        # the convergence check on these follows below (fragment truncated).
        samples = np.array(trace.get_values("P", combine=False))