How to use the ngboost.scores.CRPScore function in ngboost

To help you get started, we’ve selected a few ngboost examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github stanfordmlgroup / ngboost / figures / vis_crps.py View on Github external
    crps_fn = lambda p: manifold(CRPScore, Normal)(np.array(p)[:, np.newaxis]).score(rvs).mean()
    metric_fn = lambda p: manifold(CRPScore, Normal)(np.array(p)[:, np.newaxis]).metric()
github stanfordmlgroup / ngboost / figures / vis_crps.py View on Github external
    grad_fn = lambda p: manifold(CRPScore, Normal)(np.array(p)[:, np.newaxis]).d_score(rvs).mean(axis=0)
github stanfordmlgroup / ngboost / ngboost / distns / normal.py View on Github external
return -self.dist.logpdf(Y)

    def d_score(self, Y):
        """Gradient of the negative log-likelihood w.r.t. (loc, log-scale).

        Returns an (n, 2) array; column 0 is the loc gradient, column 1
        the log-scale gradient.
        """
        resid = self.loc - Y
        grad = np.zeros((len(Y), 2))
        grad[:, 0] = resid / self.var
        grad[:, 1] = 1 - resid ** 2 / self.var
        return grad

    def metric(self):
        """Fisher information in the (loc, log-scale) parametrization.

        Returns an (n, 2, 2) diagonal array: 1/var (plus a small
        regularizer to keep it invertible) for loc, a constant 2 for
        log-scale.
        """
        n = self.var.shape[0]
        info = np.zeros((n, 2, 2))
        info[:, 0, 0] = 1 / self.var + 1e-5
        info[:, 1, 1] = 2
        return info


class NormalCRPScore(CRPScore):
    def score(self, Y):
        """Closed-form CRPS of a Normal with mean ``loc`` and std ``scale``.

        Uses the standard analytic expression
        scale * (Z*(2*Phi(Z)-1) + 2*phi(Z) - 1/sqrt(pi)) with
        Z = (Y - loc)/scale.
        """
        z = (Y - self.loc) / self.scale
        cdf_term = z * (2 * sp.stats.norm.cdf(z) - 1)
        pdf_term = 2 * sp.stats.norm.pdf(z)
        return self.scale * (cdf_term + pdf_term - 1 / np.sqrt(np.pi))

    def d_score(self, Y):
        """Gradient of the CRPS w.r.t. (loc, log-scale), shape (n, 2).

        The log-scale column reuses the CRPS value itself, which follows
        from differentiating the closed-form expression.
        """
        z = (Y - self.loc) / self.scale
        grad = np.zeros((len(Y), 2))
        grad[:, 0] = 1 - 2 * sp.stats.norm.cdf(z)
        grad[:, 1] = self.score(Y) + (Y - self.loc) * grad[:, 0]
        return grad

    def metric(self):
github stanfordmlgroup / ngboost / ngboost / distns / exponential.py View on Github external
cens = (1 - E) * np.log(1 - self.dist.cdf(T) + eps)
        uncens = E * self.dist.logpdf(T)
        return -(cens + uncens)

    def d_score(self, Y):
        """Gradient of the censored negative log-likelihood w.r.t. log(scale).

        Y is a structured input with ``Event`` (1 = observed, 0 = censored)
        and ``Time``. Returns an (n, 1) array.
        """
        E, T = Y["Event"], Y["Time"]
        t = T.squeeze()
        censored = (1 - E) * t / self.scale
        observed = E * (t / self.scale - 1)
        return -(censored + observed).reshape((-1, 1))

    def metric(self):
        """Fisher information for log(scale): identically 1, shape (n, 1, 1)."""
        return np.ones_like(self.scale)[:, np.newaxis, np.newaxis]


class ExponentialCRPScore(CRPScore):
    """CRPS for an Exponential distribution under right-censoring.

    Y is a structured input with ``Event`` (1 = observed, 0 = censored)
    and ``Time``. Censored observations drop the final term of the
    closed-form Exponential CRPS.
    """

    def score(self, Y):
        """Per-observation CRPS; the extra exp(-2T/scale) term applies only
        to uncensored points."""
        E, T = Y["Event"], Y["Time"]
        out = T + self.scale * (2 * np.exp(-T / self.scale) - 1.5)
        obs = E == 1
        out[obs] -= 0.5 * self.scale[obs] * np.exp(-2 * T[obs] / self.scale[obs])
        return out

    def d_score(self, Y):
        """Gradient of the CRPS w.r.t. log(scale), shape (n, 1)."""
        E, T = Y["Event"], Y["Time"]
        grad = 2 * np.exp(-T / self.scale) * (self.scale + T) - 1.5 * self.scale
        obs = E == 1
        grad[obs] -= np.exp(-2 * T[obs] / self.scale[obs]) * (
            0.5 * self.scale[obs] - T[obs]
        )
        return grad.reshape((-1, 1))
github stanfordmlgroup / ngboost / ngboost / distns / normal.py View on Github external
class NormalFixedVarLogScore(LogScore):
    """Log score for a Normal distribution whose variance is held fixed;
    the only free parameter is loc."""

    def score(self, Y):
        """Negative log-likelihood of Y under the fitted distribution."""
        return -self.dist.logpdf(Y)

    def d_score(self, Y):
        """Gradient w.r.t. loc, shape (n, 1)."""
        grad = np.zeros((len(Y), 1))
        grad[:, 0] = (self.loc - Y) / self.var
        return grad

    def metric(self):
        """Fisher information for loc (1/var, lightly regularized),
        shape (n, 1, 1)."""
        info = np.zeros((self.var.shape[0], 1, 1))
        info[:, 0, 0] = 1 / self.var + 1e-5
        return info


class NormalFixedVarCRPScore(CRPScore):
    def score(self, Y):
        """Closed-form CRPS of a Normal with mean ``loc`` and std ``scale``.

        Same analytic expression as the free-variance Normal CRPS:
        scale * (Z*(2*Phi(Z)-1) + 2*phi(Z) - 1/sqrt(pi)).
        """
        z = (Y - self.loc) / self.scale
        cdf_term = z * (2 * sp.stats.norm.cdf(z) - 1)
        pdf_term = 2 * sp.stats.norm.pdf(z)
        return self.scale * (cdf_term + pdf_term - 1 / np.sqrt(np.pi))

    def d_score(self, Y):
        """Gradient of the CRPS w.r.t. loc (the only free parameter),
        shape (n, 1)."""
        z = (Y - self.loc) / self.scale
        grad = np.zeros((len(Y), 1))
        grad[:, 0] = 1 - 2 * sp.stats.norm.cdf(z)
        return grad

    def metric(self):
        I = np.c_[2 * np.ones_like(self.var)]
github stanfordmlgroup / ngboost / ngboost / distns / categorical.py View on Github external
FI = -np.einsum(
            "ji,ki->ijk", self.probs[1 : self.K_, :], self.probs[1 : self.K_, :]
        )
        d = np.einsum("jii->ij", FI)
        d[:] += self.probs[1 : self.K_, :]
        return FI

    # a test:
    # if k==j:
    #     a= FI[i,j,k] == self.probs[k,i] - self.probs[k,i]*self.probs[j,i]
    # else:
    #     a= FI[i,j,k] == -self.probs[k,i]*self.probs[j,i]
    # a


class CategoricalCRPScore(CRPScore):
    """Brier-style CRPS for a K-class categorical distribution."""

    def score(self, Y):
        """Sum of squared differences between the predicted probabilities
        and the one-hot encoding of the observed class labels Y."""
        one_hot = np.eye(self.K_)[Y]
        return ((self.probs - one_hot) ** 2).sum(axis=1)

    def d_score(self, Y):
        # Gradient not implemented for the categorical CRPS.
        return None

    def metric(self):
        # Riemannian metric not implemented for the categorical CRPS.
        return None


def k_categorical(K):
    """
    Factory function that generates classes for K-class categorical distributions for NGBoost

    The generated distribution is parametrized by K-1 free values (one per class beyond the reference class 0), not by a loc/scale pair.
    This distribution has both LogScore and CRPScore implemented for it.
github stanfordmlgroup / ngboost / ngboost / distns / lognormal.py View on Github external
D_cens[:, 1] = (
            -Z
            * sp.stats.norm.pdf(lT, loc=self.loc, scale=self.scale)
            / (1 - self.dist.cdf(T) + self.eps)
        )

        return (1 - E) * D_cens + E * D_uncens

    def metric(self):
        FI = np.zeros((self.loc.shape[0], 2, 2))
        FI[:, 0, 0] = 1 / (self.scale ** 2) + self.eps
        FI[:, 1, 1] = 2
        return FI


class LogNormalCRPScoreCensored(CRPScore):
    def score(self, Y):
        E = Y["Event"]
        T = Y["Time"]
        lT = np.log(T)
        Z = (lT - self.loc) / self.scale

        crps_uncens = self.scale * (
            Z * (2 * sp.stats.norm.cdf(Z) - 1)
            + 2 * sp.stats.norm.pdf(Z)
            - 1 / np.sqrt(np.pi)
        )
        crps_cens = self.scale * (
            Z * sp.stats.norm.cdf(Z) ** 2
            + 2 * sp.stats.norm.cdf(Z) * sp.stats.norm.pdf(Z)
            - sp.stats.norm.cdf(np.sqrt(2) * Z) / np.sqrt(np.pi)
        )