def _create_initial_point(self, Ts, E, *args):
    # pick the durations relevant to the censoring type: Ts[0] for right
    # censoring, Ts[1] (the upper bound) otherwise
    if CensoringType.is_right_censoring(self):
        log_T = np.log(Ts[0])
    elif CensoringType.is_left_censoring(self):
        log_T = np.log(Ts[1])
    elif CensoringType.is_interval_censoring(self):
        log_T = np.log(Ts[1])
    # start at the median log-duration with a unit scale parameter
    return np.array([np.median(log_T), 1.0])
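For orientation, a minimal sketch of the heuristic above on a hypothetical right-censored sample: the starting point is simply the median of the log durations paired with a unit scale.

import numpy as np

# hypothetical observed durations (what Ts[0] would hold for right censoring)
durations = np.array([5.0, 8.0, 3.0, 12.0, 7.0])
log_T = np.log(durations)

# same heuristic as _create_initial_point: median log-duration plus a unit scale
initial_point = np.array([np.median(log_T), 1.0])
print(initial_point)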
def _create_initial_point(self, Ts, E, entries, weights, Xs):
    # detect the constant (intercept-like) column in the design matrix
    constant_col = (Xs.df.var(0) < 1e-8).idxmax()

    import lifelines

    # fit a univariate model first and use its parameters to seed the regression
    uni_model = lifelines.GeneralizedGammaFitter()

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")

        if utils.CensoringType.is_right_censoring(self):
            uni_model.fit_right_censoring(Ts[0], event_observed=E, entry=entries, weights=weights)
        elif utils.CensoringType.is_interval_censoring(self):
            uni_model.fit_interval_censoring(Ts[0], Ts[1], event_observed=E, entry=entries, weights=weights)
        elif utils.CensoringType.is_left_censoring(self):
            uni_model.fit_left_censoring(Ts[1], event_observed=E, entry=entries, weights=weights)

        # we may use this later in print_summary
        self._ll_null_ = uni_model.log_likelihood_

        d = {}

        d["mu_"] = np.array([0.0] * (len(Xs.mappings["mu_"])))
        if constant_col in Xs.mappings["mu_"]:
            d["mu_"][Xs.mappings["mu_"].index(constant_col)] = uni_model.mu_

        d["sigma_"] = np.array([0.0] * (len(Xs.mappings["sigma_"])))
        if constant_col in Xs.mappings["sigma_"]:
            d["sigma_"][Xs.mappings["sigma_"].index(constant_col)] = uni_model.ln_sigma_
def _fit_model(self, Ts, E, entry, weights, show_progress=True):
    # choose the likelihood that matches the censoring type of the data
    if utils.CensoringType.is_left_censoring(self):
        negative_log_likelihood = self._negative_log_likelihood_left_censoring
    elif utils.CensoringType.is_interval_censoring(self):
        negative_log_likelihood = self._negative_log_likelihood_interval_censoring
    elif utils.CensoringType.is_right_censoring(self):
        negative_log_likelihood = self._negative_log_likelihood_right_censoring

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # value_and_grad returns (value, gradient), so jac=True lets scipy reuse it
        results = minimize(
            value_and_grad(negative_log_likelihood),  # pylint: disable=no-value-for-parameter
            self._initial_values,
            jac=True,
            method=self._scipy_fit_method,
            args=(Ts, E, entry, weights),
            bounds=self._bounds,
            options={**{"disp": show_progress}, **self._scipy_fit_options},
        )
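A self-contained sketch of the same optimization pattern, pairing autograd's value_and_grad with scipy's minimize. The toy Weibull-style negative log-likelihood and the data below are illustrative, not lifelines' internal parameterization:

import autograd.numpy as anp
from autograd import value_and_grad
from scipy.optimize import minimize

T = anp.array([5.0, 8.0, 3.0, 12.0, 7.0])   # toy durations
E = anp.array([1.0, 1.0, 0.0, 1.0, 1.0])    # toy event indicators (1 = observed)

def negative_log_likelihood(params, T, E):
    # Weibull hazard with parameters kept positive via exp()
    lambda_, rho_ = anp.exp(params[0]), anp.exp(params[1])
    log_hazard = anp.log(rho_) - anp.log(lambda_) + (rho_ - 1) * (anp.log(T) - anp.log(lambda_))
    cumulative_hazard = (T / lambda_) ** rho_
    return -anp.sum(E * log_hazard - cumulative_hazard)

results = minimize(
    value_and_grad(negative_log_likelihood),  # returns (value, gradient), hence jac=True
    anp.zeros(2),
    jac=True,
    method="L-BFGS-B",
    args=(T, E),
)
print(results.x)  # fitted (log lambda, log rho)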
def cdf_plot(model, timeline=None, **plot_kwargs):
    ax = plt.gca()

    if timeline is None:
        timeline = model.timeline

    COL_EMP = "empirical CDF"

    # build the non-parametric (Kaplan-Meier) estimate that matches the censoring type
    if CensoringType.is_left_censoring(model):
        empirical_kmf = KaplanMeierFitter().fit_left_censoring(
            model.durations, model.event_observed, label=COL_EMP, timeline=timeline
        )
    elif CensoringType.is_right_censoring(model):
        empirical_kmf = KaplanMeierFitter().fit_right_censoring(
            model.durations, model.event_observed, label=COL_EMP, timeline=timeline
        )
    elif CensoringType.is_interval_censoring(model):
        raise NotImplementedError("lifelines does not have a non-parametric interval model yet.")

    empirical_kmf.plot_cumulative_density(ax=ax, **plot_kwargs)

    # overlay the fitted parametric CDF on the same axes
    dist = get_distribution_name_of_lifelines_model(model)
    dist_object = create_scipy_stats_model_from_lifelines_model(model)
    ax.plot(timeline, dist_object.cdf(timeline), label="fitted %s" % dist, **plot_kwargs)
    ax.legend()
    return ax
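A hedged usage sketch, assuming the fragment above is lifelines' cdf_plot helper (lifelines.plotting.cdf_plot): it overlays the empirical Kaplan-Meier CDF and the fitted parametric CDF for a model fit on the bundled Rossi recidivism dataset.

from lifelines import WeibullFitter
from lifelines.datasets import load_rossi
from lifelines.plotting import cdf_plot

df = load_rossi()
wf = WeibullFitter().fit(df["week"], df["arrest"])

# empirical CDF vs. fitted Weibull CDF on the same axes
ax = cdf_plot(wf)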
def _ll_null(self):
    # cached from a previous call (or set by _create_initial_point)
    if hasattr(self, "_ll_null_"):
        return self._ll_null_

    initial_point = np.zeros(len(self._fitted_parameter_names))

    # fit an intercept-only version of this model to obtain the null log-likelihood
    model = self.__class__(breakpoints=self.breakpoints[:-1], penalizer=self.penalizer)
    regressors = {param_name: ["_intercept"] for param_name in self._fitted_parameter_names}

    if CensoringType.is_right_censoring(self):
        df = pd.DataFrame({"T": self.durations, "E": self.event_observed, "entry": self.entry, "_intercept": 1.0})
        model.fit_right_censoring(
            df, "T", "E", initial_point=initial_point, entry_col="entry", regressors=regressors
        )
    elif CensoringType.is_interval_censoring(self):
        df = pd.DataFrame(
            {
                "lb": self.lower_bound,
                "ub": self.upper_bound,
                "E": self.event_observed,
                "entry": self.entry,
                "_intercept": 1.0,
            }
        )
        model.fit_interval_censoring(
            df, "lb", "ub", "E", initial_point=initial_point, entry_col="entry", regressors=regressors
        )
    elif CensoringType.is_left_censoring(self):
        raise NotImplementedError()

    self._ll_null_ = model.log_likelihood_
    return self._ll_null_
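At the user level, the same idea can be sketched by fitting a no-covariate model and reading off its log-likelihood; WeibullFitter and the Rossi dataset below are illustrative choices, not what the method above uses internally:

from lifelines import WeibullFitter
from lifelines.datasets import load_rossi

df = load_rossi()

# a covariate-free fit; its log-likelihood plays the role of _ll_null_
null_model = WeibullFitter().fit(df["week"], df["arrest"])
print(null_model.log_likelihood_)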