if priors is not None:
    # Creates the priors from dave_priors
    as_priors = ModelHelper.get_astropy_priors(priors)
    if len(as_priors.keys()) > 0:
        # If there are priors, this is a Bayesian parameter estimation
        max_post = True
        fitmethod = "BFGS"
    else:
        as_priors = None
        logging.warning("fit_power_density_spectrum: can't create priors from dave_priors.")

if as_priors:
    # Creates a Posterior object with the priors
    lpost = PSDPosterior(pds.freq, pds.power, fit_model, priors=as_priors, m=pds.m)
else:
    # Creates the Maximum Likelihood object for fitting
    lpost = PSDLogLikelihood(pds.freq, pds.power, fit_model, m=pds.m)

# Creates the PSD parameter estimation object and runs the fit
parest = PSDParEst(pds, fitmethod=fitmethod, max_post=max_post)
res = parest.fit(lpost, starting_pars, neg=True)

sample = None
if as_priors and sampling_params is not None:
    # If this is a Bayesian parameter estimation and sampling parameters were given,
    # sample the posterior distribution defined in `lpost` using MCMC
    sample = parest.sample(lpost, res.p_opt, cov=res.cov,
                           nwalkers=sampling_params["nwalkers"],
                           niter=sampling_params["niter"],
                           burnin=sampling_params["burnin"],
                           threads=sampling_params["threads"],
                           print_results=False, plot=False)

# Prepares the results to be returned to the GUI
fixed = [fit_model.fixed[n] for n in fit_model.param_names]
# Names of the free (not fixed) parameters
parnames = [n for n, f in zip(fit_model.param_names, fixed) if not f]
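# A minimal, self-contained sketch of the same fitting flow (an illustration, not the original
# function): in the module above, `pds`, `fit_model` and `starting_pars` are provided by the
# caller; here they are built from simulated Poisson data purely for demonstration.
import numpy as np
from astropy.modeling.models import Const1D, Lorentz1D
from stingray import Lightcurve, Powerspectrum
from stingray.modeling import PSDLogLikelihood, PSDParEst

rng = np.random.default_rng(0)
times = np.arange(0, 512, 0.1)
lc = Lightcurve(times, rng.poisson(100, times.size), dt=0.1)
pds = Powerspectrum(lc, norm="leahy")

fit_model = Lorentz1D(amplitude=5., x_0=1., fwhm=0.5) + Const1D(amplitude=2.)
starting_pars = [5., 1., 0.5, 2.]
lpost = PSDLogLikelihood(pds.freq, pds.power, fit_model, m=pds.m)
parest = PSDParEst(pds, fitmethod="BFGS", max_post=False)
res = parest.fit(lpost, starting_pars, neg=True)
print(res.p_opt)  # best-fit amplitude, x_0, fwhm and constant level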
from astropy.modeling.models import Const1D, Lorentz1D
from stingray.modeling import ParameterEstimation, PSDLogLikelihood


def fit_data_with_lorentz_and_const(x_values, y_values):
    # Initial guesses for the Lorentzian + constant components
    amplitude = 5.
    x_0 = 1
    fwhm = 0.5
    const = 5.
    g_init = Lorentz1D(amplitude, x_0, fwhm)
    g_init += Const1D(const)
    lpost = PSDLogLikelihood(x_values, y_values, g_init)
    parest = ParameterEstimation()
    res = parest.fit(lpost, [amplitude, x_0, fwhm, const], neg=True)
    opt_amplitude = res.p_opt[0]
    opt_x_0 = res.p_opt[1]
    opt_fwhm = res.p_opt[2]
    opt_const = res.p_opt[3]
    return opt_amplitude, opt_x_0, opt_fwhm, opt_const
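# Illustrative call (not part of the original module): fit a synthetic Lorentzian-plus-constant
# periodogram. The frequency grid, true parameters, and exponential scatter below are assumptions
# chosen only to exercise the helper; real power spectra come from Stingray objects.
import numpy as np
from astropy.modeling.models import Const1D, Lorentz1D

freqs = np.linspace(0.01, 10., 1000)
true_model = Lorentz1D(amplitude=5., x_0=1., fwhm=0.5) + Const1D(amplitude=2.)
powers = true_model(freqs) * np.random.default_rng(0).exponential(1., size=freqs.size)
amp, x0, fwhm, const = fit_data_with_lorentz_and_const(freqs, powers)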
from astropy.modeling.models import Gaussian1D


def fit_data_with_gaussian(x_values, y_values, amplitude=1., mean=0, stddev=1.):
    g_init = Gaussian1D(amplitude, mean, stddev)
    lpost = PSDLogLikelihood(x_values, y_values, g_init)
    parest = ParameterEstimation()
    res = parest.fit(lpost, [amplitude, mean, stddev], neg=True)
    opt_amplitude = res.p_opt[0]
    opt_mean = res.p_opt[1]
    opt_stddev = res.p_opt[2]
    return opt_amplitude, opt_mean, opt_stddev
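# Illustrative call (assumed data, not from the original module): recover Gaussian parameters
# from a noisy Gaussian-shaped profile with periodogram-like exponential scatter.
import numpy as np
from astropy.modeling.models import Gaussian1D

x = np.linspace(0.1, 10., 500)
y = Gaussian1D(amplitude=3., mean=5., stddev=1.2)(x) * np.random.default_rng(0).exponential(1., size=x.size)
amp, mean, stddev = fit_data_with_gaussian(x, y, amplitude=2., mean=4., stddev=1.)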
if (evt_list.time[evt_list.ncounts - 1] - evt_list.time[0]) >= dt:
    lc = evt_list.to_lc(dt)
    if lc and np.sqrt(lc.meancounts * lc.meancounts) > 0:
        gti = base_gti
        if not gti:
            gti = lc.gti

        if segm_size > lc.tseg:
            segm_size = lc.tseg
            logging.warning("get_rms_spectrum: range: " + str(energy_low) + " to " + str(energy_high) +
                            ", segmsize bigger than lc.duration, lc.duration applied instead.")

        pds = None
        if pds_type == 'Sng':
            pds = Powerspectrum(lc, norm=norm, gti=gti)
        else:
            pds = AveragedPowerspectrum(lc=lc, segment_size=segm_size, norm=norm, gti=gti)

        if pds:
            if df > 0:
                pds = pds.rebin(df=df)

            # amp, x0, fwhm, white_noise_offset = ModelHelper.fit_data_with_lorentz_and_const(pds.freq, pds.power)
            # logging.info("get_rms_spectrum: amp: " + str(amp) + ", x0: " + str(x0) + ", fwhm: " + str(fwhm) + ", white_noise: " + str(white_noise))

            if freq_range[0] < 0:
                freq_low = min(pds.freq)
            else:
                freq_low = freq_range[0]
evt_list = EventList(filtered_event_list[:, 0], pi=filtered_event_list[:, 1])
if evt_list and evt_list.ncounts > 1:
    if (evt_list.time[evt_list.ncounts - 1] - evt_list.time[0]) >= dt:
        lc = evt_list.to_lc(dt)
        if lc and np.sqrt(lc.meancounts * lc.meancounts) > 0:
            rms, rms_err = 0, 0
            gti = base_gti
            if not gti:
                gti = lc.gti

            pds = Powerspectrum(lc, norm='frac', gti=gti)
            if pds:
                if df > 0:
                    pds = pds.rebin(df=df)

                if len(pds.freq):
                    if freq_range[0] < 0:
                        freq_low = min(pds.freq)
                    else:
                        freq_low = freq_range[0]

                    if freq_min_max[0] >= 0:
                        freq_min_max[0] = min([freq_min_max[0], freq_low])
                    else:
                        freq_min_max[0] = freq_low
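                    # Sketch of an assumed continuation (not from the original file): pick freq_high
                    # the same way freq_low was picked, then obtain the fractional rms in that band
                    # from the 'frac'-normalized spectrum with stingray's Powerspectrum.compute_rms.
                    freq_high = max(pds.freq) if freq_range[1] < 0 else freq_range[1]
                    rms, rms_err = pds.compute_rms(freq_low, freq_high)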
white_noise_offset = 0.0  # default value returned if no valid power spectrum can be built
if evt_list and evt_list.ncounts > 1:
    if (evt_list.time[evt_list.ncounts - 1] - evt_list.time[0]) >= dt:
        lc = evt_list.to_lc(dt)
        if lc and np.sqrt(lc.meancounts * lc.meancounts) > 0:
            if not gti:
                gti = lc.gti

            if segm_size > lc.tseg:
                segm_size = lc.tseg
                logging.warning("get_white_noise_offset: segmsize bigger than lc.duration, lc.duration applied instead.")

            pds = None
            if pds_type == 'Sng':
                pds = Powerspectrum(lc, norm='leahy', gti=gti)
            else:
                pds = AveragedPowerspectrum(lc=lc, segment_size=segm_size, norm='leahy', gti=gti)

            if pds:
                if df > 0:
                    pds = pds.rebin(df=df)

                # Retry the fit until a positive white-noise offset is found (at most 5 attempts)
                num_tries = 0
                while white_noise_offset <= 0.0 and num_tries < 5:
                    amp, x0, fwhm, wno = ModelHelper.fit_data_with_lorentz_and_const(pds.freq, pds.power)
                    white_noise_offset = wno
                    num_tries += 1

return white_noise_offset
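# Illustrative check (assumed data, not part of the original module): for a Leahy-normalized
# power spectrum of pure Poisson noise the expected white-noise level is ~2, so the constant
# returned by the fit_data_with_lorentz_and_const helper defined above should come out near that value.
import numpy as np
from stingray import Lightcurve, Powerspectrum

rng = np.random.default_rng(42)
lc = Lightcurve(np.arange(0, 256, 0.5), rng.poisson(50, 512), dt=0.5)
pds = Powerspectrum(lc, norm='leahy')
amp, x0, fwhm, wno = fit_data_with_lorentz_and_const(pds.freq, pds.power)
# wno approximates the white-noise offset used by get_white_noise_offset above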