# How to use reliability - 10 common examples

## To help you get started, we’ve selected a few reliability examples based on popular ways the library is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately. MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Weibull_3P. The fit from Scipy was used instead so the results may not be accurate.')
sp = ss.weibull_min.fit(all_data, optimizer='powell')
self.alpha = sp
self.beta = sp
self.gamma = sp

params = [self.alpha, self.beta, self.gamma]
self.loglik2 = LL2
if n - k - 1 > 0:
self.AICc = 2 * k + LL2 + (2 * k ** 2 + 2 * k) / (n - k - 1)
else:
self.AICc = 'Insufficient data'
self.BIC = np.log(n) * k + LL2
self.distribution = Weibull_Distribution(alpha=self.alpha, beta=self.beta, gamma=self.gamma)

# confidence interval estimates of parameters
Z = -ss.norm.ppf((1 - CI) / 2)
hessian_matrix = hessian(Fit_Weibull_3P.LL)(np.array(tuple(params)), np.array(tuple(failures)), np.array(tuple(right_censored)))
covariance_matrix = np.linalg.inv(hessian_matrix)
self.alpha_SE = abs(covariance_matrix) ** 0.5
self.beta_SE = abs(covariance_matrix) ** 0.5
self.gamma_SE = abs(covariance_matrix) ** 0.5
self.alpha_upper = self.alpha * (np.exp(Z * (self.alpha_SE / self.alpha)))
self.alpha_lower = self.alpha * (np.exp(-Z * (self.alpha_SE / self.alpha)))
self.beta_upper = self.beta * (np.exp(Z * (self.beta_SE / self.beta)))
self.beta_lower = self.beta * (np.exp(-Z * (self.beta_SE / self.beta)))
self.gamma_upper = self.gamma * (np.exp(Z * (self.gamma_SE / self.gamma)))  # here we assume gamma can only be positive as there are bounds placed on it in the optimizer. Minitab assumes positive or negative so bounds are different
self.gamma_lower = self.gamma * (np.exp(-Z * (self.gamma_SE / self.gamma)))

Data = {'Parameter': ['Alpha', 'Beta', 'Gamma'],`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````if type(failures) != np.ndarray:
raise TypeError('failures must be a list or array of failure data')
if type(right_censored) == list:
right_censored = np.array(right_censored)
if type(right_censored) != np.ndarray:
raise TypeError('right_censored must be a list or array of right censored failure data')
all_data = np.hstack([failures, right_censored])

# solve it
self.gamma = 0
sp = ss.weibull_min.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
warnings.filterwarnings('ignore')  # necessary to supress the warning about the jacobian when using the nelder-mead optimizer

if force_beta is None:
guess = [sp, sp]
else:
guess = [sp]

if result.success is True:
params = result.x
self.success = True
if force_beta is None:
self.alpha = params
self.beta = params
else:
self.alpha = params * 1  # the *1 converts ndarray to float64
self.beta = force_beta
else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Weibull_2P. The fit from Scipy was used instead so results may not be accurate.')`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````right_censored = np.array(right_censored)
if type(right_censored) != np.ndarray:
raise TypeError('right_censored must be a list or array of right censored failure data')
all_data = np.hstack([failures, right_censored])

# solve it
self.gamma = 0
sp = ss.weibull_min.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
warnings.filterwarnings('ignore')  # necessary to supress the warning about the jacobian when using the nelder-mead optimizer

if force_beta is None:
guess = [sp, sp]
else:
guess = [sp]

if result.success is True:
params = result.x
self.success = True
if force_beta is None:
self.alpha = params
self.beta = params
else:
self.alpha = params * 1  # the *1 converts ndarray to float64
self.beta = force_beta
else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Weibull_2P. The fit from Scipy was used instead so results may not be accurate.')
self.alpha = sp
self.beta = sp`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````all_data_shifted = np.hstack([failures_shifted, right_censored_shifted])
sp = ss.lognorm.fit(all_data_shifted, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
guess = [np.log(sp), sp]
warnings.filterwarnings('ignore')  # necessary to supress the warning about the jacobian when using the nelder-mead optimizer

if result.success is True:
params = result.x
mu = params
sigma = params
else:
print('WARNING: Fitting using Autograd FAILED for the gamma optimisation section of Lognormal_3P. The fit from Scipy was used instead so results may not be accurate.')
mu = sp
sigma = sp

LL2 = 2 * Fit_Lognormal_2P.LL([mu, sigma], failures_shifted, right_censored_shifted)
return LL2`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````if type(right_censored) != np.ndarray:
raise TypeError('right_censored must be a list or array of right censored failure data')

self.gamma = 0
all_data = np.hstack([failures, right_censored])

# solve it
sp = ss.lognorm.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
if force_sigma is None:
bnds = [(0.0001, None), (0.0001, None)]  # bounds of solution
guess = [np.log(sp), sp]
result = minimize(value_and_grad(Fit_Lognormal_2P.LL), guess, args=(failures, right_censored), jac=True, bounds=bnds, tol=1e-6)
else:
bnds = [(0.0001, None)]  # bounds of solution
guess = [np.log(sp)]
result = minimize(value_and_grad(Fit_Lognormal_2P.LL_fs), guess, args=(failures, right_censored, force_sigma), jac=True, bounds=bnds, tol=1e-6)

if result.success is True:
params = result.x
self.success = True
if force_sigma is None:
self.mu = params
self.sigma = params
else:
self.mu = params
self.sigma = force_sigma

else:
self.success = False
warnings.warn('Fitting using Autograd FAILED for Lognormal_2P. The fit from Scipy was used instead so results may not be accurate.')
self.mu = np.log(sp)
self.sigma = sp`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````print('WARNING: Fitting using Autograd FAILED for Expon_2P. The fit from Scipy was used instead so results may not be accurate.')
sp = ss.expon.fit(all_data, optimizer='powell')
self.Lambda = sp
self.gamma = sp

self.loglik2 = LL2
if n - k - 1 > 0:
self.AICc = 2 * k + LL2 + (2 * k ** 2 + 2 * k) / (n - k - 1)
else:
self.AICc = 'Insufficient data'
self.BIC = np.log(n) * k + LL2
self.distribution = Exponential_Distribution(Lambda=self.Lambda, gamma=self.gamma)

# confidence interval estimates of parameters. Uses Expon_1P because gamma (while optimized) cannot be used in the MLE solution as the solution is unbounded
Z = -ss.norm.ppf((1 - CI) / 2)
hessian_matrix = hessian(Fit_Expon_1P.LL)(np.array(tuple([self.Lambda])), np.array(tuple(failures - self.gamma)), np.array(tuple(right_censored - self.gamma)))
covariance_matrix = np.linalg.inv(hessian_matrix)
self.Lambda_SE = abs(covariance_matrix) ** 0.5
self.gamma_SE = 0
self.Lambda_upper = self.Lambda * (np.exp(Z * (self.Lambda_SE / self.Lambda)))
self.Lambda_lower = self.Lambda * (np.exp(-Z * (self.Lambda_SE / self.Lambda)))
self.gamma_upper = self.gamma
self.gamma_lower = self.gamma
self.Lambda_inv = 1 / self.Lambda
self.Lambda_SE_inv = abs(1 / self.Lambda * np.log(self.Lambda / self.Lambda_upper) / Z)
self.Lambda_lower_inv = 1 / self.Lambda_upper
self.Lambda_upper_inv = 1 / self.Lambda_lower

Data = {'Parameter': ['Lambda', '1/Lambda', 'Gamma'],
'Point Estimate': [self.Lambda, self.Lambda_inv, self.gamma],
'Standard Error': [self.Lambda_SE, self.Lambda_SE_inv, self.gamma_SE],
'Lower CI': [self.Lambda_lower, self.Lambda_lower_inv, self.gamma_lower],`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````if type(failures) == list:
failures = np.array(failures)
if type(failures) != np.ndarray:
raise TypeError('failures must be a list or array of failure data')
if type(right_censored) == list:
right_censored = np.array(right_censored)
if type(right_censored) != np.ndarray:
raise TypeError('right_censored must be a list or array of right censored failure data')
all_data = np.hstack([failures, right_censored])

# solve it
self.gamma = 0
sp = ss.expon.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
guess = [1 / sp]
warnings.filterwarnings('ignore')  # necessary to supress the warning about the jacobian when using the nelder-mead optimizer

if result.success is True:
params = result.x
self.success = True
self.Lambda = params
else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Expon_1P. The fit from Scipy was used instead so results may not be accurate.')
self.Lambda = 1 / sp

params = [self.Lambda]
k = len(params)
n = len(all_data)
LL2 = 2 * Fit_Expon_1P.LL(params, failures, right_censored)
self.loglik2 = LL2
if n - k - 1 > 0:`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````if type(right_censored) == list:
right_censored = np.array(right_censored)
if type(right_censored) != np.ndarray:
raise TypeError('right_censored must be a list or array of right censored failure data')
all_data = np.hstack([failures, right_censored])

# solve it
self.gamma = 0
sp = ss.gamma.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
warnings.filterwarnings('ignore')
if force_beta is None:
guess = [sp, sp]
else:
guess = [sp]

if result.success is True:
params = result.x
self.success = True
if force_beta is None:
self.alpha = params
self.beta = params
else:
self.alpha = params
self.beta = force_beta
else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Gamma_2P. The fit from Scipy was used instead so results may not be accurate.')
self.alpha = sp
self.beta = sp
self.gamma = sp`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````failures = np.array(failures)
if type(failures) != np.ndarray:
raise TypeError('failures must be a list or array of failure data')
if type(right_censored) == list:
right_censored = np.array(right_censored)
if type(right_censored) != np.ndarray:
raise TypeError('right_censored must be a list or array of right censored failure data')
all_data = np.hstack([failures, right_censored])

# solve it
self.gamma = 0
sp = ss.gamma.fit(all_data, floc=0, optimizer='powell')  # scipy's answer is used as an initial guess. Scipy is only correct when there is no censored data
warnings.filterwarnings('ignore')
if force_beta is None:
guess = [sp, sp]
else:
guess = [sp]

if result.success is True:
params = result.x
self.success = True
if force_beta is None:
self.alpha = params
self.beta = params
else:
self.alpha = params
self.beta = force_beta
else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Gamma_2P. The fit from Scipy was used instead so results may not be accurate.')`````` MatthewReid854 / reliability / reliability / Fitters.py View on Github ``````runs += 1
if inv is True:
result = minimize(value_and_grad(Fit_Expon_2P.LL_inv), guess, args=(failures, right_censored), jac=True, method='L-BFGS-B', bounds=bnds2)
if result.success is False or inv is False:
if runs == 1:
guess = [1 / sp, self.gamma]  # fix the guess to be the non-inverted form
self.initial_guess = guess
result = minimize(value_and_grad(Fit_Expon_2P.LL), guess, args=(failures, right_censored), jac=True, method='L-BFGS-B', bounds=bnds2)
inv = False  # inversion status changed for subsequent loops

params = result.x
guess = [params, params]
if inv is False:
LL2 = 2 * Fit_Expon_2P.LL(guess, failures, right_censored)
else:
LL2 = 2 * Fit_Expon_2P.LL_inv(guess, failures, right_censored)
BIC_array.append(np.log(n) * k + LL2)
delta_BIC = abs(BIC_array[-1] - BIC_array[-2])

if result.success is True:
params = result.x
self.success = True
if inv is False:
self.Lambda = params
else:
self.Lambda = 1 / params
self.gamma = params
else:
self.success = False
print('WARNING: Fitting using Autograd FAILED for Expon_2P. The fit from Scipy was used instead so results may not be accurate.')
sp = ss.expon.fit(all_data, optimizer='powell')
self.Lambda = sp``````

## reliability

Reliability Engineering toolkit for Python. Hosted on GitHub under the LGPL-3.0 license. Latest version published 23 days ago.

75 / 100

## Popular Python code snippets

Find secure code to use in your application or website