How to use the pypesto.objective.constants.HESS constant in pypesto

To help you get started, we’ve selected a few pypesto examples that show how HESS is used in public projects.
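All of the snippets below use HESS as the dictionary key under which a pypesto objective reports second-order sensitivities (the Hessian), alongside FVAL, GRAD, RES and SRES. Here is a minimal sketch of that pattern, assuming a pypesto version that still exposes pypesto.objective.constants and whose Objective can be called with return_dict=True; the Rosenbrock helpers come from scipy:

import numpy as np
import scipy.optimize as so

import pypesto
from pypesto.objective.constants import FVAL, GRAD, HESS, MODE_FUN

# Objective with analytic gradient and Hessian (Rosenbrock function from scipy)
objective = pypesto.Objective(
    fun=so.rosen, grad=so.rosen_der, hess=so.rosen_hess)

x = np.zeros(5)
# Request value, gradient and Hessian in one call; with return_dict=True
# the outputs come back keyed by the constants imported above.
result = objective(x, sensi_orders=(0, 1, 2), mode=MODE_FUN, return_dict=True)
print(result[FVAL], result[GRAD].shape, result[HESS].shape)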

github ICB-DCM / pyPESTO / test / test_history.py
            if isinstance(it_final, np.ndarray):
                it_final = it_final[0]
            it_start = int(np.where(np.logical_not(
                np.isnan(start.history.get_fval_trace())
            ))[0][0])
            assert np.allclose(
                xfull(start.history.get_x_trace(it_start)), start.x0)
            assert np.allclose(
                xfull(start.history.get_x_trace(it_final)), start.x)
            assert np.isclose(
                start.history.get_fval_trace(it_start), start.fval0)

        funs = {
            FVAL: self.obj.get_fval,
            GRAD: self.obj.get_grad,
            HESS: self.obj.get_hess,
            RES: self.obj.get_res,
            SRES: self.obj.get_sres,
            CHI2: lambda x: res_to_chi2(self.obj.get_res(x)),
            SCHI2: lambda x: sres_to_schi2(*self.obj(
                x, (0, 1,),
                pypesto.objective.constants.MODE_RES
            ))
        }
        for var, fun in funs.items():
            if not var == FVAL and not getattr(self.history_options,
                                               f'trace_record_{var}'):
                continue
            for it in range(5):
                x_full = xfull(start.history.get_x_trace(it))
                val = getattr(start.history, f'get_{var}_trace')(it)
                if np.all(np.isnan(val)):

github ICB-DCM / pyPESTO / pypesto / objective / amici_util.py
"""Default output upon error.

    Returns values indicative of an error, that is with nan entries in all
    vectors, and a function value, i.e. nllh, of `np.inf`.
    """
    if not amici_model.nt():
        nt = sum([data.nt() for data in edatas])
    else:
        nt = sum([data.nt() if data.nt() else amici_model.nt()
                  for data in edatas])
    n_res = nt * amici_model.nytrue

    return {
        FVAL: np.inf,
        GRAD: np.nan * np.ones(dim),
        HESS: np.nan * np.ones([dim, dim]),
        RES:  np.nan * np.ones(n_res),
        SRES: np.nan * np.ones([n_res, dim]),
        RDATAS: rdatas
    }
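Downstream code treats such an error output like any other result dictionary and recognizes the failure by its non-finite entries. A small self-contained illustration of that check (the problem sizes here are made up):

import numpy as np
from pypesto.objective.constants import FVAL, GRAD, HESS, RES, SRES

dim, n_res = 3, 10  # illustrative problem sizes
error_output = {
    FVAL: np.inf,
    GRAD: np.nan * np.ones(dim),
    HESS: np.nan * np.ones([dim, dim]),
    RES: np.nan * np.ones(n_res),
    SRES: np.nan * np.ones([n_res, dim]),
}

# A failed simulation shows up as an infinite objective value and
# all-NaN sensitivities, including the Hessian.
assert not np.isfinite(error_output[FVAL])
assert np.isnan(error_output[HESS]).all()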

github ICB-DCM / pyPESTO / pypesto / objective / objective.py
"""
        Convert output tuple to dict.
        """
        output_dict = {}
        index = 0
        if not isinstance(output_tuple, tuple):
            output_tuple = (output_tuple,)
        if mode == MODE_FUN:
            if 0 in sensi_orders:
                output_dict[FVAL] = output_tuple[index]
                index += 1
            if 1 in sensi_orders:
                output_dict[GRAD] = output_tuple[index]
                index += 1
            if 2 in sensi_orders:
                output_dict[HESS] = output_tuple[index]
        elif mode == MODE_RES:
            if 0 in sensi_orders:
                output_dict[RES] = output_tuple[index]
                index += 1
            if 1 in sensi_orders:
                output_dict[SRES] = output_tuple[index]
        return output_dict
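This mapping is also what return_dict=True exposes to the caller: without it the outputs come back as a plain tuple ordered by sensitivity order, and the helper above attaches the FVAL, GRAD and HESS (or RES and SRES) keys. A quick sketch of the correspondence, under the same assumptions as the first example:

import numpy as np
import scipy.optimize as so

import pypesto
from pypesto.objective.constants import HESS, MODE_FUN

objective = pypesto.Objective(
    fun=so.rosen, grad=so.rosen_der, hess=so.rosen_hess)

x = np.zeros(5)
# Tuple form: entries ordered by sensitivity order, here (fval, grad, hess)
fval, grad, hess = objective(x, sensi_orders=(0, 1, 2), mode=MODE_FUN)
# Dict form: the same values keyed by the constants
as_dict = objective(x, sensi_orders=(0, 1, 2), mode=MODE_FUN, return_dict=True)
assert np.allclose(as_dict[HESS], hess)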

github ICB-DCM / pyPESTO / pypesto / hierarchical / calculator.py
            #    edatas,
            #    num_threads=min(n_threads, len(edatas)),
            #)
            sy = [rdata['sy'] for rdata in rdatas]
            snllh = self.inner_solver.calculate_gradients(self.inner_problem,
                                                          x_inner_opt,
                                                          sim,
                                                          sy,
                                                          parameter_mapping,
                                                          x_ids,
                                                          amici_model,
                                                          snllh)

        return {FVAL: nllh,
                GRAD: snllh,
                HESS: s2nllh,
                RES: res,
                SRES: sres,
                RDATAS: rdatas
                }

github ICB-DCM / pyPESTO / pypesto / hierarchical / calculator.py
        else:
            # WLS = np.sum([optimal_surrogate[i]['fun'] for i in range(len(optimal_surrogate))])
            # WLS = np.sqrt(WLS)
            WLS = compute_WLS(optimal_surrogate, self.problem, edatas, rdatas)

        print('cost function: ' + str(WLS))
        #TODO: gradient computation
        #if sensi_order > 0:
        #    snllh = compute_snllh(edatas, rdatas, optimal_scalings, obj.x_ids, obj.mapping_par_opt_to_par_sim, obj.dim)
        # TODO compute FIM or HESS
        # TODO RES, SRES should also be possible, right?

        return {
            FVAL: WLS,
            GRAD: snllh,
            HESS: s2nllh,
            RES: res,
            SRES: sres,
            RDATAS: rdatas
        }

github ICB-DCM / pyPESTO / pypesto / objective / pre_post_process.py
    def postprocess(self, result: Dict) -> Dict:
        """Constrain results to optimization parameter dimensions."""
        result = super().postprocess(result)

        if result.get(GRAD, None) is not None:
            grad = result[GRAD]
            if grad.size == self.dim_full:
                grad = grad[self.x_free_indices]
                result[GRAD] = grad
        if result.get(HESS, None) is not None:
            hess = result[HESS]
            if hess.shape[0] == self.dim_full:
                hess = hess[np.ix_(self.x_free_indices, self.x_free_indices)]
                result[HESS] = hess
        if result.get(SRES, None) is not None:
            sres = result[SRES]
            if sres.shape[-1] == self.dim_full:
                sres = sres[..., self.x_free_indices]
                result[SRES] = sres

        return result
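The HESS branch above relies on np.ix_ to pick the free-parameter rows and columns of the full Hessian at once. A standalone numpy illustration (the sizes and indices are made up):

import numpy as np

# Full-dimension Hessian over 4 parameters, of which only 0 and 2 are free
hess_full = np.arange(16, dtype=float).reshape(4, 4)
x_free_indices = [0, 2]

# np.ix_ builds an open mesh, so this selects the free rows *and* columns,
# leaving the 2x2 sub-Hessian of the free parameters
hess_free = hess_full[np.ix_(x_free_indices, x_free_indices)]
print(hess_free)
# [[ 0.  2.]
#  [ 8. 10.]]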

github ICB-DCM / pyPESTO / pypesto / objective / priors.py
    def call_unprocessed(
            self,
            x: np.ndarray,
            sensi_orders: Tuple[int, ...],
            mode: str
    ) -> ResultDict:

        res = {}

        for order in sensi_orders:
            if order == 0:
                res[FVAL] = self.neg_log_density(x)
            elif order == 1:
                res[GRAD] = self.gradient_neg_log_density(x)
            elif order == 2:
                res[HESS] = self.hessian_neg_log_density(x)
            else:
                raise ValueError(f'Invalid sensi order {order}.')

        return res

github ICB-DCM / pyPESTO / pypesto / objective / history.py
    def get_hess_trace(
            self, ix: Union[int, Sequence[int], None] = None
    ) -> Union[Sequence[MaybeArray], MaybeArray]:
        return list(self._trace[(HESS, np.nan)].values[ix])

github ICB-DCM / pyPESTO / pypesto / objective / history.py
    def _update_vals(self,
                     x: np.ndarray,
                     result: ResultDict):
        """
        Update initial and best function values.
        """
        # update initial point
        if np.allclose(x, self.x0):
            if self.fval0 is None:
                self.fval0 = result.get(FVAL, None)
            self.x0 = x

        # update best point
        fval = result.get(FVAL, None)
        grad = result.get(GRAD, None)
        hess = result.get(HESS, None)
        res = result.get(RES, None)
        sres = result.get(SRES, None)

        if fval is not None and fval < self.fval_min:
            self.fval_min = fval
            self.x_min = x
            self.grad_min = grad
            self.hess_min = hess
            self.res_min = res
            self.sres_min = sres

        # sometimes sensitivities are evaluated on subsequent calls. We can
        # identify this situation by checking that x hasn't changed
        if np.all(self.x_min == x):
            if self.grad_min is None and grad is not None:
                self.grad_min = grad