if var in [FVAL, CHI2]:
    assert np.allclose(
        val, fun(x_full),
    ), var
elif var in [RES]:
    # note that we can expect slight deviations here since
    # this res is computed without sensitivities while the
    # result here may be computed with sensitivities
    # activated. If this fails too often, increase atol/rtol
    assert np.allclose(
        val, fun(x_full),
        rtol=1e-3, atol=1e-4
    ), var
elif var in [SRES]:
    assert np.allclose(
        val, fun(x_full)[:, self.problem.x_free_indices],
    ), var
elif var in [GRAD, SCHI2]:
    assert np.allclose(
        val, self.problem.get_reduced_vector(fun(x_full)),
    ), var
elif var in [HESS]:
    assert np.allclose(
        val, self.problem.get_reduced_matrix(fun(x_full)),
    ), var
else:
    raise RuntimeError('missing test implementation')
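The reduced/full split used in these checks, as a quick sketch (hypothetical values; get_reduced_vector selects the free-parameter entries):

import numpy as np

x_full = np.array([1.0, 2.0, 3.0])
x_free_indices = [0, 2]                # parameter 1 is fixed
x_reduced = x_full[x_free_indices]     # -> array([1., 3.])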
def call_unprocessed(
        self,
        x: np.ndarray,
        sensi_orders: Tuple[int, ...],
        mode: str
) -> ResultDict:
    res = {}
    for order in sensi_orders:
        if order == 0:
            res[FVAL] = self.neg_log_density(x)
        elif order == 1:
            res[GRAD] = self.gradient_neg_log_density(x)
        elif order == 2:
            res[HESS] = self.hessian_neg_log_density(x)
        else:
            raise ValueError(f'Invalid sensi order {order}.')
    return res
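For illustration, a minimal density that would plug into call_unprocessed above; this is a sketch assuming a standard normal density (the class name is hypothetical, FVAL/GRAD/HESS are the result keys used throughout):

import numpy as np

class StandardNormalDensity:
    """Sketch: -log density of a standard normal, up to a constant."""

    def neg_log_density(self, x: np.ndarray) -> float:
        return 0.5 * float(x @ x)

    def gradient_neg_log_density(self, x: np.ndarray) -> np.ndarray:
        return x

    def hessian_neg_log_density(self, x: np.ndarray) -> np.ndarray:
        return np.eye(len(x))

With this, call_unprocessed(x, sensi_orders=(0, 1), mode=MODE_FUN) returns {FVAL: 0.5 * x @ x, GRAD: x}.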
if self.fun_accept_sensi_orders:
    result = self.fun(x, sensi_orders)
    if not isinstance(result, dict):
        result = Objective.output_to_dict(
            sensi_orders, MODE_FUN, result)
elif sensi_orders == (0,):
    if self.grad is True:
        fval = self.fun(x)[0]
    else:
        fval = self.fun(x)
    result = {FVAL: fval}
elif sensi_orders == (1,):
    if self.grad is True:
        grad = self.fun(x)[1]
    else:
        grad = self.grad(x)
    result = {GRAD: grad}
elif sensi_orders == (2,):
    if self.hess is True:
        hess = self.fun(x)[2]
    else:
        hess = self.hess(x)
    result = {HESS: hess}
elif sensi_orders == (0, 1):
    if self.grad is True:
        fval, grad = self.fun(x)[0:2]
    else:
        fval = self.fun(x)
        grad = self.grad(x)
    result = {FVAL: fval,
              GRAD: grad}
elif sensi_orders == (1, 2):
    if self.hess is True:
        grad, hess = self.fun(x)[1:3]
    else:
        if self.grad is True:
            grad = self.fun(x)[1]
        else:
            grad = self.grad(x)
        hess = self.hess(x)
    result = {GRAD: grad,
              HESS: hess}

if optimal_surrogate is None:  # hypothetical failure check (assumption)
    print('Inner optimization not succeeded')
else:
    # WLS = np.sum([optimal_surrogate[i]['fun'] for i in range(len(optimal_surrogate))])
    # WLS = np.sqrt(WLS)
    WLS = compute_WLS(optimal_surrogate, self.problem, edatas, rdatas)
    print('cost function: ' + str(WLS))
    # TODO: gradient computation
    # if sensi_order > 0:
    #     snllh = compute_snllh(edatas, rdatas, optimal_scalings,
    #                           obj.x_ids,
    #                           obj.mapping_par_opt_to_par_sim,
    #                           obj.dim)

# TODO compute FIM or HESS
# TODO RES, SRES should also be possible, right?
# snllh, s2nllh, res and sres are assumed to be initialized
# (e.g. to None) before this return
return {
    FVAL: WLS,
    GRAD: snllh,
    HESS: s2nllh,
    RES: res,
    SRES: sres,
    RDATAS: rdatas
}
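The commented-out lines above suggest what compute_WLS amounts to; a hedged sketch, not the original implementation (compute_WLS_sketch is a hypothetical name):

import numpy as np

def compute_WLS_sketch(optimal_surrogate) -> float:
    # sum the optimal inner objective values, then take the root,
    # as in the two commented-out lines
    wls = sum(opt['fun'] for opt in optimal_surrogate)
    return float(np.sqrt(wls))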
def get_error_output(amici_model, edatas, rdatas, dim: int):
    """Default output upon error.

    Returns values indicative of an error, that is with nan entries in all
    vectors, and a function value, i.e. nllh, of `np.inf`.
    """
    if not amici_model.nt():
        nt = sum([data.nt() for data in edatas])
    else:
        nt = sum([data.nt() if data.nt() else amici_model.nt()
                  for data in edatas])
    n_res = nt * amici_model.nytrue
    return {
        FVAL: np.inf,
        GRAD: np.nan * np.ones(dim),
        HESS: np.nan * np.ones([dim, dim]),
        RES: np.nan * np.ones(n_res),
        SRES: np.nan * np.ones([n_res, dim]),
        RDATAS: rdatas
    }
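A toy illustration of the residual dimensions (made-up numbers): with two conditions of 10 and 12 time points and 4 observables, nt = 22 and n_res = nt * amici_model.nytrue = 88, so RES gets 88 nan entries and SRES an (88, dim) nan matrix.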
profile_index:
    array of integer indicators specifying for each parameter whether
    a profile should be computed (1) or not (0). By default, all
    profiles are computed.
profile_list:
    integer specifying whether a call to the profiler should create a
    new list of profiles (default) or append to a specific existing
    profile list.
problem_dimension:
    number of parameters in the unreduced problem.
global_opt:
    log-posterior at the global optimum.
"""
if optimizer_result[GRAD] is not None:
    gradnorm = np.linalg.norm(optimizer_result[GRAD])
else:
    gradnorm = None

# create blank profile
new_profile = ProfilerResult(
    x_path=optimizer_result["x"],
    fval_path=np.array([optimizer_result["fval"]]),
    ratio_path=np.array([np.exp(global_opt - optimizer_result["fval"])]),
    gradnorm_path=gradnorm,
    exitflag_path=optimizer_result["exitflag"],
    time_path=np.array([0.]),
    time_total=0.,
    n_fval=0,
    n_grad=0,
    n_hess=0,
    message=None)
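The ratio_path entry is a likelihood ratio: with fval a negative log-posterior and global_opt the smallest such value, exp(global_opt - fval) is 1 at the optimum and decays toward 0 along the profile. A quick numeric check (made-up values):

import numpy as np

global_opt = -2.0
fvals = np.array([-2.0, -1.0, 1.5])
np.exp(global_opt - fvals)   # -> array([1.    , 0.3679, 0.0302])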
res = np.hstack([res, rdata['res']]) \
    if res.size else rdata['res']
if sensi_order > 0:
    opt_sres = sim_sres_to_opt_sres(
        x_ids,
        par_sim_ids,
        condition_map_sim_var,
        rdata['sres'],
        coefficient=1.0
    )
    sres = np.vstack([sres, opt_sres]) \
        if sres.size else opt_sres

ret = {
    FVAL: nllh,
    CHI2: chi2,
    GRAD: snllh,
    HESS: s2nllh,
    RES: res,
    SRES: sres,
    RDATAS: rdatas
}
return {
    key: val
    for key, val in ret.items()
    if val is not None
}
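The final comprehension drops entries that were never computed, so callers only see the orders they asked for:

ret = {'fval': 12.3, 'grad': None, 'res': None}
{key: val for key, val in ret.items() if val is not None}
# -> {'fval': 12.3}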
x_next = create_next_guess(x_now, i_par, par_direction,
                           options, current_profile, problem,
                           global_opt)

# fix current profiling parameter to current value and set
# start point
problem.fix_parameters(i_par, x_next[i_par])
startpoint = np.array([x_next[i] for i in problem.x_free_indices])

# run optimization
# IMPORTANT: This optimization will need proper exception
# handling (coming soon)
optimizer_result = optimizer.minimize(problem, startpoint, '0',
                                      allow_failed_starts=False)

if optimizer_result[GRAD] is not None:
    gradnorm = np.linalg.norm(optimizer_result[GRAD][
        problem.x_free_indices])
else:
    gradnorm = None

current_profile.append_profile_point(
    x=optimizer_result.x,
    fval=optimizer_result.fval,
    ratio=np.exp(global_opt - optimizer_result.fval),
    gradnorm=gradnorm,
    time=optimizer_result.time,
    exitflag=optimizer_result.exitflag,
    n_fval=optimizer_result.n_fval,
    n_grad=optimizer_result.n_grad,
    n_hess=optimizer_result.n_hess)

# free the profiling parameter again
problem.unfix_parameters(i_par)
def _init_trace(self, x: np.ndarray):
    """
    Initialize the trace.
    """
    if self.x_names is None:
        self.x_names = [f'x{i}' for i, _ in enumerate(x)]

    columns: List[Tuple] = [
        (c, float('nan')) for c in [
            TIME, N_FVAL, N_GRAD, N_HESS, N_RES, N_SRES,
            FVAL, CHI2, RES, SRES, HESS,
        ]
    ]

    for var in [X, GRAD, SCHI2]:
        if var == 'x' or self.options[f'trace_record_{var}']:
            columns.extend([
                (var, x_name)
                for x_name in self.x_names
            ])
        else:
            # single nan-named column; keeps tuple lengths consistent
            # for the MultiIndex below
            columns.extend([(var, float('nan'))])

    # TODO: multi-index for res, sres, hess
    self._trace = pd.DataFrame(columns=pd.MultiIndex.from_tuples(columns),
                               dtype='float64')
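    # Example column layout (hypothetical, two parameters x0 and x1):
    #   ('time', nan), ('fval', nan), ..., ('x', 'x0'), ('x', 'x1'),
    #   ('grad', 'x0'), ('grad', 'x1'), ...
    # so self._trace['x'] selects a per-parameter sub-frame by name.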
    # only non-float64
    trace_dtypes = {
        RES: 'object',
        SRES: 'object',
        HESS: 'object',
    }
    for var, dtype in trace_dtypes.items():
        self._trace[var] = self._trace[var].astype(dtype)

# presumably the simulations were run along these lines:
# rdatas = amici.runAmiciSimulations(
#     amici_model,
#     amici_solver,
#     edatas,
#     num_threads=min(n_threads, len(edatas)),
# )
sy = [rdata['sy'] for rdata in rdatas]
snllh = self.inner_solver.calculate_gradients(self.inner_problem,
                                              x_inner_opt,
                                              sim,
                                              sy,
                                              parameter_mapping,
                                              x_ids,
                                              amici_model,
                                              snllh)
return {FVAL: nllh,
        GRAD: snllh,
        HESS: s2nllh,
        RES: res,
        SRES: sres,
        RDATAS: rdatas
        }