How to use reliability - 10 common examples

To help you get started, we've selected a few reliability examples based on popular ways it is used in public projects. The snippets below are all drawn from the library's own Fitters.py module.


github MatthewReid854 / reliability / reliability / Fitters.py
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Weibull_3P. The fit from Scipy was used instead so the results may not be accurate.')
            sp = ss.weibull_min.fit(all_data, optimizer='powell')
            self.alpha = sp[2]
            self.beta = sp[0]
            self.gamma = sp[1]

        params = [self.alpha, self.beta, self.gamma]
        self.loglik2 = LL2
        if n - k - 1 > 0:
            self.AICc = 2 * k + LL2 + (2 * k ** 2 + 2 * k) / (n - k - 1)
        else:
            self.AICc = 'Insufficient data'
        self.BIC = np.log(n) * k + LL2
        self.distribution = Weibull_Distribution(alpha=self.alpha, beta=self.beta, gamma=self.gamma)

        # confidence interval estimates of parameters
        Z = -ss.norm.ppf((1 - CI) / 2)
        hessian_matrix = hessian(Fit_Weibull_3P.LL)(np.array(tuple(params)), np.array(tuple(failures)), np.array(tuple(right_censored)))
        covariance_matrix = np.linalg.inv(hessian_matrix)
        self.alpha_SE = abs(covariance_matrix[0][0]) ** 0.5
        self.beta_SE = abs(covariance_matrix[1][1]) ** 0.5
        self.gamma_SE = abs(covariance_matrix[2][2]) ** 0.5
        self.alpha_upper = self.alpha * (np.exp(Z * (self.alpha_SE / self.alpha)))
        self.alpha_lower = self.alpha * (np.exp(-Z * (self.alpha_SE / self.alpha)))
        self.beta_upper = self.beta * (np.exp(Z * (self.beta_SE / self.beta)))
        self.beta_lower = self.beta * (np.exp(-Z * (self.beta_SE / self.beta)))
        self.gamma_upper = self.gamma * (np.exp(Z * (self.gamma_SE / self.gamma)))  # here we assume gamma can only be positive as there are bounds placed on it in the optimizer. Minitab assumes positive or negative so bounds are different
        self.gamma_lower = self.gamma * (np.exp(-Z * (self.gamma_SE / self.gamma)))

        Data = {'Parameter': ['Alpha', 'Beta', 'Gamma'],
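
The snippet above is the tail of the Fit_Weibull_3P fitter: when the autograd optimisation fails it falls back to SciPy, then computes AICc, BIC and Hessian-based confidence bounds for alpha, beta and gamma. A minimal usage sketch follows; the import path, keyword arguments and data are assumptions for illustration, not code from the repository.

# Hypothetical usage sketch (import path, keywords and data are assumed)
from reliability.Fitters import Fit_Weibull_3P

failures = [12, 15, 22, 25, 28, 31, 39, 45]   # example failure times
right_censored = [50, 50, 50]                  # example right-censored times

fit = Fit_Weibull_3P(failures=failures, right_censored=right_censored, CI=0.95)
print(fit.alpha, fit.alpha_lower, fit.alpha_upper)   # point estimate and CI bounds
print(fit.beta, fit.gamma)
print(fit.AICc, fit.BIC)                             # goodness-of-fit criteria computed in the snippet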
github MatthewReid854 / reliability / reliability / Fitters.py
        if type(failures) != np.ndarray:
            raise TypeError('failures must be a list or array of failure data')
        if type(right_censored) == list:
            right_censored = np.array(right_censored)
        if type(right_censored) != np.ndarray:
            raise TypeError('right_censored must be a list or array of right censored failure data')
        all_data = np.hstack([failures, right_censored])

        # solve it
        self.gamma = 0
        sp = ss.weibull_min.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        warnings.filterwarnings('ignore')  # necessary to suppress the warning about the jacobian when using the nelder-mead optimizer

        if force_beta is None:
            guess = [sp[2], sp[0]]
            result = minimize(value_and_grad(Fit_Weibull_2P.LL), guess, args=(failures, right_censored), jac=True, tol=1e-6, method='nelder-mead')
        else:
            guess = [sp[2]]
            result = minimize(value_and_grad(Fit_Weibull_2P.LL_fb), guess, args=(failures, right_censored, force_beta), jac=True, tol=1e-6, method='nelder-mead')

        if result.success is True:
            params = result.x
            self.success = True
            if force_beta is None:
                self.alpha = params[0]
                self.beta = params[1]
            else:
                self.alpha = params * 1  # the *1 converts ndarray to float64
                self.beta = force_beta
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Weibull_2P. The fit from Scipy was used instead so results may not be accurate.')
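
This excerpt shows how Fit_Weibull_2P seeds an autograd-based maximum-likelihood fit with SciPy's uncensored estimate, and how force_beta pins the shape parameter so that only alpha is optimised. A hedged usage sketch (import path, keywords and data are illustrative, not from the repo):

# Hypothetical usage sketch of the class shown above
from reliability.Fitters import Fit_Weibull_2P

failures = [27, 35, 38, 46, 48, 55, 61, 70]    # example data
fit = Fit_Weibull_2P(failures=failures, right_censored=[80, 80])
print(fit.alpha, fit.beta, fit.success)

# force_beta fixes the shape parameter, so only alpha is estimated
fit_fb = Fit_Weibull_2P(failures=failures, right_censored=[80, 80], force_beta=2.0)
print(fit_fb.alpha, fit_fb.beta)               # beta is exactly 2.0 here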
github MatthewReid854 / reliability / reliability / Fitters.py
            right_censored = np.array(right_censored)
        if type(right_censored) != np.ndarray:
            raise TypeError('right_censored must be a list or array of right censored failure data')
        all_data = np.hstack([failures, right_censored])

        # solve it
        self.gamma = 0
        sp = ss.weibull_min.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        warnings.filterwarnings('ignore')  # necessary to suppress the warning about the jacobian when using the nelder-mead optimizer

        if force_beta is None:
            guess = [sp[2], sp[0]]
            result = minimize(value_and_grad(Fit_Weibull_2P.LL), guess, args=(failures, right_censored), jac=True, tol=1e-6, method='nelder-mead')
        else:
            guess = [sp[2]]
            result = minimize(value_and_grad(Fit_Weibull_2P.LL_fb), guess, args=(failures, right_censored, force_beta), jac=True, tol=1e-6, method='nelder-mead')

        if result.success is True:
            params = result.x
            self.success = True
            if force_beta is None:
                self.alpha = params[0]
                self.beta = params[1]
            else:
                self.alpha = params * 1  # the *1 converts ndarray to float64
                self.beta = force_beta
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Weibull_2P. The fit from Scipy was used instead so results may not be accurate.')
            self.alpha = sp[2]
            self.beta = sp[0]
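
The same pattern appears throughout Fitters.py: SciPy's uncensored fit provides the initial guess, and scipy.optimize.minimize with autograd's value_and_grad then maximises the censored likelihood. The self-contained sketch below reproduces that idea for a two-parameter Weibull; the log-likelihood, variable names and data are illustrative, not the library's own code.

# Illustrative, self-contained version of the pattern above (not library code)
import warnings
import autograd.numpy as anp
from autograd import value_and_grad
import scipy.stats as ss
from scipy.optimize import minimize

def neg_LL_weibull_2P(params, failures, right_censored):
    # negative log-likelihood with right censoring:
    # log f(t) for failures, log S(t) for censored observations
    alpha, beta = params[0], params[1]
    LL_f = anp.sum(anp.log(beta / alpha * (failures / alpha) ** (beta - 1)) - (failures / alpha) ** beta)
    LL_rc = anp.sum(-(right_censored / alpha) ** beta)
    return -(LL_f + LL_rc)

failures = anp.array([27.0, 35.0, 38.0, 46.0, 48.0, 55.0, 61.0, 70.0])
right_censored = anp.array([80.0, 80.0])

# scipy's uncensored fit (shape, loc, scale) seeds the censored MLE
shape, loc, scale = ss.weibull_min.fit(anp.hstack([failures, right_censored]), floc=0)
guess = [scale, shape]                          # [alpha, beta]

warnings.filterwarnings('ignore')               # silence the jacobian warning, as in Fitters.py
result = minimize(value_and_grad(neg_LL_weibull_2P), guess,
                  args=(failures, right_censored), jac=True,
                  method='nelder-mead', tol=1e-6)
alpha, beta = result.x
print(alpha, beta, result.success)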
github MatthewReid854 / reliability / reliability / Fitters.py
        all_data_shifted = np.hstack([failures_shifted, right_censored_shifted])
        sp = ss.lognorm.fit(all_data_shifted, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        guess = [np.log(sp[2]), sp[0]]
        warnings.filterwarnings('ignore')  # necessary to suppress the warning about the jacobian when using the nelder-mead optimizer
        result = minimize(value_and_grad(Fit_Lognormal_2P.LL), guess, args=(failures_shifted, right_censored_shifted), jac=True, tol=1e-2, method='nelder-mead')

        if result.success is True:
            params = result.x
            mu = params[0]
            sigma = params[1]
        else:
            print('WARNING: Fitting using Autograd FAILED for the gamma optimisation section of Lognormal_3P. The fit from Scipy was used instead so results may not be accurate.')
            mu = sp[2]
            sigma = sp[0]

        LL2 = 2 * Fit_Lognormal_2P.LL([mu, sigma], failures_shifted, right_censored_shifted)
        return LL2
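
This helper evaluates the Lognormal_2P log-likelihood for a candidate gamma shift; Fit_Lognormal_3P uses it to optimise the location parameter. A hedged usage sketch of the 3-parameter fitter itself (import path, keywords, attributes and data are assumptions based on the other fitters):

# Hypothetical usage sketch; attribute names follow the pattern of the other fitters
from reliability.Fitters import Fit_Lognormal_3P

fit = Fit_Lognormal_3P(failures=[5, 8, 12, 14, 20, 25], right_censored=[30, 30])
print(fit.mu, fit.sigma, fit.gamma)    # assumed attributes, by analogy with Fit_Lognormal_2P below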
github MatthewReid854 / reliability / reliability / Fitters.py
        if type(right_censored) != np.ndarray:
            raise TypeError('right_censored must be a list or array of right censored failure data')

        self.gamma = 0
        all_data = np.hstack([failures, right_censored])

        # solve it
        sp = ss.lognorm.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        if force_sigma is None:
            bnds = [(0.0001, None), (0.0001, None)]  # bounds of solution
            guess = [np.log(sp[2]), sp[0]]
            result = minimize(value_and_grad(Fit_Lognormal_2P.LL), guess, args=(failures, right_censored), jac=True, bounds=bnds, tol=1e-6)
        else:
            bnds = [(0.0001, None)]  # bounds of solution
            guess = [np.log(sp[2])]
            result = minimize(value_and_grad(Fit_Lognormal_2P.LL_fs), guess, args=(failures, right_censored, force_sigma), jac=True, bounds=bnds, tol=1e-6)

        if result.success is True:
            params = result.x
            self.success = True
            if force_sigma is None:
                self.mu = params[0]
                self.sigma = params[1]
            else:
                self.mu = params[0]
                self.sigma = force_sigma

        else:
            self.success = False
            warnings.warn('Fitting using Autograd FAILED for Lognormal_2P. The fit from Scipy was used instead so results may not be accurate.')
            self.mu = np.log(sp[2])
            self.sigma = sp[0]
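
Fit_Lognormal_2P estimates mu and sigma with a bounded optimiser, and force_sigma fixes sigma so that only mu is fitted. A minimal usage sketch, with assumed import path and example data:

# Hypothetical usage sketch of the class shown above
from reliability.Fitters import Fit_Lognormal_2P

failures = [5, 8, 12, 14, 20, 25]
fit = Fit_Lognormal_2P(failures=failures, right_censored=[30, 30])
print(fit.mu, fit.sigma, fit.success)

# force_sigma fixes sigma, so only mu is optimised (the bounded branch above)
fit_fs = Fit_Lognormal_2P(failures=failures, right_censored=[30, 30], force_sigma=0.5)
print(fit_fs.mu, fit_fs.sigma)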
github MatthewReid854 / reliability / reliability / Fitters.py
            print('WARNING: Fitting using Autograd FAILED for Expon_2P. The fit from Scipy was used instead so results may not be accurate.')
            sp = ss.expon.fit(all_data, optimizer='powell')
            self.Lambda = sp[1]
            self.gamma = sp[0]

        self.loglik2 = LL2
        if n - k - 1 > 0:
            self.AICc = 2 * k + LL2 + (2 * k ** 2 + 2 * k) / (n - k - 1)
        else:
            self.AICc = 'Insufficient data'
        self.BIC = np.log(n) * k + LL2
        self.distribution = Exponential_Distribution(Lambda=self.Lambda, gamma=self.gamma)

        # confidence interval estimates of parameters. Uses Expon_1P because gamma (while optimized) cannot be used in the MLE solution as the solution is unbounded
        Z = -ss.norm.ppf((1 - CI) / 2)
        hessian_matrix = hessian(Fit_Expon_1P.LL)(np.array(tuple([self.Lambda])), np.array(tuple(failures - self.gamma)), np.array(tuple(right_censored - self.gamma)))
        covariance_matrix = np.linalg.inv(hessian_matrix)
        self.Lambda_SE = abs(covariance_matrix[0][0]) ** 0.5
        self.gamma_SE = 0
        self.Lambda_upper = self.Lambda * (np.exp(Z * (self.Lambda_SE / self.Lambda)))
        self.Lambda_lower = self.Lambda * (np.exp(-Z * (self.Lambda_SE / self.Lambda)))
        self.gamma_upper = self.gamma
        self.gamma_lower = self.gamma
        self.Lambda_inv = 1 / self.Lambda
        self.Lambda_SE_inv = abs(1 / self.Lambda * np.log(self.Lambda / self.Lambda_upper) / Z)
        self.Lambda_lower_inv = 1 / self.Lambda_upper
        self.Lambda_upper_inv = 1 / self.Lambda_lower

        Data = {'Parameter': ['Lambda', '1/Lambda', 'Gamma'],
                'Point Estimate': [self.Lambda, self.Lambda_inv, self.gamma],
                'Standard Error': [self.Lambda_SE, self.Lambda_SE_inv, self.gamma_SE],
                'Lower CI': [self.Lambda_lower, self.Lambda_lower_inv, self.gamma_lower],
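
This excerpt is the results section of Fit_Expon_2P: confidence bounds for Lambda come from the Fit_Expon_1P Hessian (with gamma held fixed), and the inverted parameterisation 1/Lambda is reported alongside. A hedged usage sketch with assumed import path and data:

# Hypothetical usage sketch; import path, keywords and data are assumed
from reliability.Fitters import Fit_Expon_2P

fit = Fit_Expon_2P(failures=[12, 15, 22, 25, 28, 31], right_censored=[40, 40], CI=0.95)
print(fit.Lambda, fit.Lambda_lower, fit.Lambda_upper)   # rate parameter and its bounds
print(fit.Lambda_inv)                                   # the scale 1/Lambda
print(fit.gamma)                                        # location shift; its reported bounds equal the point estimate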
github MatthewReid854 / reliability / reliability / Fitters.py
        if type(failures) == list:
            failures = np.array(failures)
        if type(failures) != np.ndarray:
            raise TypeError('failures must be a list or array of failure data')
        if type(right_censored) == list:
            right_censored = np.array(right_censored)
        if type(right_censored) != np.ndarray:
            raise TypeError('right_censored must be a list or array of right censored failure data')
        all_data = np.hstack([failures, right_censored])

        # solve it
        self.gamma = 0
        sp = ss.expon.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        guess = [1 / sp[1]]
        warnings.filterwarnings('ignore')  # necessary to suppress the warning about the jacobian when using the nelder-mead optimizer
        result = minimize(value_and_grad(Fit_Expon_1P.LL), guess, args=(failures, right_censored), jac=True, tol=1e-6, method='nelder-mead')

        if result.success is True:
            params = result.x
            self.success = True
            self.Lambda = params[0]
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Expon_1P. The fit from Scipy was used instead so results may not be accurate.')
            self.Lambda = 1 / sp[1]

        params = [self.Lambda]
        k = len(params)
        n = len(all_data)
        LL2 = 2 * Fit_Expon_1P.LL(params, failures, right_censored)
        self.loglik2 = LL2
        if n - k - 1 > 0:
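
Here Fit_Expon_1P fits the single rate parameter Lambda and then derives the information criteria from LL2 (twice the value returned by the LL function, i.e. -2 times the log-likelihood). A hedged usage sketch, recomputing AICc and BIC from the loglik2 attribute assigned above:

# Hypothetical usage sketch; import path, keywords and data are assumed
import numpy as np
from reliability.Fitters import Fit_Expon_1P

fit = Fit_Expon_1P(failures=[12, 15, 22, 25, 28, 31], right_censored=[40, 40])
print(fit.Lambda, fit.success)

k, n = 1, 8                                     # one parameter, eight observations
print(fit.AICc, 2 * k + fit.loglik2 + (2 * k ** 2 + 2 * k) / (n - k - 1))
print(fit.BIC, np.log(n) * k + fit.loglik2)     # matches the formulas in the snippet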
github MatthewReid854 / reliability / reliability / Fitters.py
        if type(right_censored) == list:
            right_censored = np.array(right_censored)
        if type(right_censored) != np.ndarray:
            raise TypeError('right_censored must be a list or array of right censored failure data')
        all_data = np.hstack([failures, right_censored])

        # solve it
        self.gamma = 0
        sp = ss.gamma.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        warnings.filterwarnings('ignore')
        if force_beta is None:
            guess = [sp[2], sp[0]]
            result = minimize(value_and_grad(Fit_Gamma_2P.LL), guess, args=(failures, right_censored), jac=True, method='nelder-mead', tol=1e-10)
        else:
            guess = [sp[2]]
            result = minimize(value_and_grad(Fit_Gamma_2P.LL_fb), guess, args=(failures, right_censored, force_beta), jac=True, method='nelder-mead', tol=1e-10)

        if result.success is True:
            params = result.x
            self.success = True
            if force_beta is None:
                self.alpha = params[0]
                self.beta = params[1]
            else:
                self.alpha = params[0]
                self.beta = force_beta
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Gamma_2P. The fit from Scipy was used instead so results may not be accurate.')
            self.alpha = sp[2]
            self.beta = sp[0]
            self.gamma = sp[1]
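
Fit_Gamma_2P follows the same recipe with a tighter Nelder-Mead tolerance (1e-10), and force_beta fixes the shape parameter so only alpha is optimised. A hedged usage sketch with assumed import path and data:

# Hypothetical usage sketch; import path, keywords and data are assumed
from reliability.Fitters import Fit_Gamma_2P

failures = [14, 21, 27, 33, 40, 52]
fit = Fit_Gamma_2P(failures=failures, right_censored=[60, 60])
print(fit.alpha, fit.beta, fit.success)

# force_beta fixes the shape parameter beta, so only alpha is estimated
fit_fb = Fit_Gamma_2P(failures=failures, right_censored=[60, 60], force_beta=1.5)
print(fit_fb.alpha, fit_fb.beta)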
github MatthewReid854 / reliability / reliability / Fitters.py
            failures = np.array(failures)
        if type(failures) != np.ndarray:
            raise TypeError('failures must be a list or array of failure data')
        if type(right_censored) == list:
            right_censored = np.array(right_censored)
        if type(right_censored) != np.ndarray:
            raise TypeError('right_censored must be a list or array of right censored failure data')
        all_data = np.hstack([failures, right_censored])

        # solve it
        self.gamma = 0
        sp = ss.gamma.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
        warnings.filterwarnings('ignore')
        if force_beta is None:
            guess = [sp[2], sp[0]]
            result = minimize(value_and_grad(Fit_Gamma_2P.LL), guess, args=(failures, right_censored), jac=True, method='nelder-mead', tol=1e-10)
        else:
            guess = [sp[2]]
            result = minimize(value_and_grad(Fit_Gamma_2P.LL_fb), guess, args=(failures, right_censored, force_beta), jac=True, method='nelder-mead', tol=1e-10)

        if result.success is True:
            params = result.x
            self.success = True
            if force_beta is None:
                self.alpha = params[0]
                self.beta = params[1]
            else:
                self.alpha = params[0]
                self.beta = force_beta
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Gamma_2P. The fit from Scipy was used instead so results may not be accurate.')
github MatthewReid854 / reliability / reliability / Fitters.py
            runs += 1
            if inv is True:
                result = minimize(value_and_grad(Fit_Expon_2P.LL_inv), guess, args=(failures, right_censored), jac=True, method='L-BFGS-B', bounds=bnds2)
            if result.success is False or inv is False:
                if runs == 1:
                    guess = [1 / sp[1], self.gamma]  # fix the guess to be the non-inverted form
                    self.initial_guess = guess
                result = minimize(value_and_grad(Fit_Expon_2P.LL), guess, args=(failures, right_censored), jac=True, method='L-BFGS-B', bounds=bnds2)
                inv = False  # inversion status changed for subsequent loops

            params = result.x
            guess = [params[0], params[1]]
            if inv is False:
                LL2 = 2 * Fit_Expon_2P.LL(guess, failures, right_censored)
            else:
                LL2 = 2 * Fit_Expon_2P.LL_inv(guess, failures, right_censored)
            BIC_array.append(np.log(n) * k + LL2)
            delta_BIC = abs(BIC_array[-1] - BIC_array[-2])

        if result.success is True:
            params = result.x
            self.success = True
            if inv is False:
                self.Lambda = params[0]
            else:
                self.Lambda = 1 / params[0]
            self.gamma = params[1]
        else:
            self.success = False
            print('WARNING: Fitting using Autograd FAILED for Expon_2P. The fit from Scipy was used instead so results may not be accurate.')
            sp = ss.expon.fit(all_data, optimizer='powell')
            self.Lambda = sp[1]
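
The final excerpt shows the iterative part of Fit_Expon_2P: the optimiser is re-run, switching from the inverted (1/Lambda) parameterisation to the direct one if needed, until the change in BIC between successive runs becomes negligible. Below is a stripped-down, self-contained sketch of that stopping rule; the log-likelihood, bounds and data are illustrative and do not reproduce the library's inversion logic.

# Illustrative sketch of the BIC-convergence loop (not library code)
import numpy as np
from scipy.optimize import minimize

def neg_LL_expon_2P(params, failures, right_censored):
    # negative log-likelihood of a shifted (2-parameter) exponential distribution
    Lam, gamma = params
    LL_f = np.sum(np.log(Lam) - Lam * (failures - gamma))
    LL_rc = np.sum(-Lam * (right_censored - gamma))
    return -(LL_f + LL_rc)

failures = np.array([12.0, 15.0, 22.0, 25.0, 28.0, 31.0])
right_censored = np.array([40.0, 40.0])
n, k = len(failures) + len(right_censored), 2
bnds = [(1e-4, None), (0.0, float(failures.min()) - 1e-4)]   # Lambda > 0, gamma below the first failure

guess = [1.0 / failures.mean(), 0.0]
BIC_array = [1e20]                 # seeded large so the first delta is always accepted
delta_BIC, runs = 1.0, 0

while delta_BIC > 0.001 and runs < 5:
    runs += 1
    result = minimize(neg_LL_expon_2P, guess, args=(failures, right_censored),
                      method='L-BFGS-B', bounds=bnds)
    guess = list(result.x)
    LL2 = 2 * neg_LL_expon_2P(guess, failures, right_censored)
    BIC_array.append(np.log(n) * k + LL2)
    delta_BIC = abs(BIC_array[-1] - BIC_array[-2])

print(result.x, result.success, BIC_array[1:])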