How to use the aif360.algorithms.inprocessing.celisMeta.utils.getDistribution function in aif360

To help you get started, we’ve selected a few aif360 examples, based on popular ways it is used in public projects.
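Before the full examples, here is a minimal sketch of the shared call pattern. The function signature and the "mean"/"cov" keys are taken from the snippets below; the toy arrays, their 0/1 encodings, and the import alias are illustrative assumptions.

import numpy as np
from aif360.algorithms.inprocessing.celisMeta import utils as ut

# Toy data: 100 samples, 2 features, a binary label, and a binary protected
# attribute. Illustrative values only -- substitute your own training arrays.
rng = np.random.default_rng(0)
x_train = rng.normal(size=(100, 2))
y_train = rng.integers(0, 2, size=100)           # label encoding assumed
x_control_train = rng.integers(0, 2, size=100)   # protected attribute (0/1)

# getDistribution fits distribution parameters to the training data and returns
# two parameter sets; the snippets below read them as dicts with "mean"/"cov".
dist_params, dist_params_train = ut.getDistribution(x_train, y_train, x_control_train)
print(dist_params["mean"], dist_params["cov"])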


github IBM / AIF360 / aif360 / algorithms / inprocessing / celisMeta / General.py (View on Github)
def getModel(self, tau, x_train, y_train, x_control_train):
    # `ut` is aif360.algorithms.inprocessing.celisMeta.utils, imported at module level.
    # tau == 0 means no fairness constraint, so fall back to the unbiased model.
    if tau == 0:
        return self.getUnbiasedModel(x_train, y_train, x_control_train)

    # Fit distribution parameters to the training data; both returned parameter
    # sets are dicts (testSyntheticData below reads their "mean" and "cov" keys).
    dist_params, dist_params_train = ut.getDistribution(x_train, y_train, x_control_train)
    eps = 0.01
    L = math.ceil(tau / eps)  # number of eps-sized steps needed to cover tau
    # Base rate of the binary protected attribute.
    z_1 = sum(x_control_train) / float(len(x_control_train))
    z_0 = 1 - z_1
    p, q = [0, 0], [0, 0]
    paramsOpt, samples = [], []
    maxAcc = 0
    maxGamma = 0

    # Sweep candidate (a, b) constraint values from getRange, tracking the
    # best accuracy/gamma and the corresponding parameters.
    span = self.getRange(eps, tau)
    for (a, b) in span:
        acc, gamma = 0, 0
        samples = ut.getRandomSamples(dist_params_train)
        # ... (snippet truncated on the source page)
github IBM / AIF360 / aif360 / algorithms / inprocessing / celisMeta / General.py (View on Github)
def getUnbiasedModel(self, x_train, y_train, x_control_train):
    # Fit distribution parameters to the training data.
    dist_params, dist_params_train = ut.getDistribution(x_train, y_train, x_control_train)
    eps = 0.01
    # Base rate of the binary protected attribute.
    z_1 = sum(x_control_train) / float(len(x_control_train))
    z_0 = 1 - z_1
    p, q = [0, 0], [0, 0]
    params = [0] * self.getNumOfParams()
    samples = ut.getRandomSamples(dist_params_train)

    # With all parameters zeroed, the returned closure scores a sample x
    # without applying any fairness constraint.
    def model(x):
        return self.getValueForX(dist_params, p, q, params, samples, z_0, z_1, x, 0)

    return model
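Both getModel and getUnbiasedModel return a closure that scores one sample at a time. A minimal sketch of consuming such a closure might look like the following; the +/-1 thresholding, the evaluate_model helper, the tau value, and the trainer object are illustrative assumptions, while the data split mirrors ut.getData() in the next snippet.

from aif360.algorithms.inprocessing.celisMeta import utils as ut

def evaluate_model(model, x_test, y_test):
    # `model` is the closure returned by getModel/getUnbiasedModel; thresholding
    # its score at 0 to produce a +/-1 label is an assumption for illustration.
    predictions = [1 if model(x) > 0 else -1 for x in x_test]
    return sum(int(p == y) for p, y in zip(predictions, y_test)) / len(y_test)

# `trainer` stands in (hypothetically) for an instance of the concrete General
# subclass whose methods appear above; tau = 0.5 is an illustrative value
# (tau == 0 would return the unbiased model, as the first snippet shows).
x_train, y_train, x_control_train, x_control_test, x_test, y_test = ut.getData()
model = trainer.getModel(0.5, x_train, y_train, x_control_train)
print("test accuracy:", evaluate_model(model, x_test, y_test))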
github IBM / AIF360 / aif360 / algorithms / inprocessing / celisMeta / General.py (View on Github)
def testSyntheticData(self):
    # Load the synthetic train/test split used for the experiments.
    x_train, y_train, x_control_train, x_control_test, x_test, y_test = ut.getData()
    dist_params, dist_params_train = ut.getDistribution(x_train, y_train, x_control_train)

    # Unpack the fitted mean/covariance from both parameter sets.
    mean, cov, meanT, covT = (dist_params["mean"], dist_params["cov"],
                              dist_params_train["mean"], dist_params_train["cov"])
    # Standard-normal noise component with the same dimensionality.
    meanN = [0] * len(mean)
    covN = np.identity(len(mean))

    # Two-component mixture: the fitted distribution plus the noise component,
    # weighted by lw (read from the command line).
    means = [mean, meanN]
    covariances = [cov, covN]
    lw = float(sys.argv[2])
    weights = [1 - lw, lw]

    LR, LE = len(y_train), len(y_test)
    train, test = [], []
    for i in range(0, LR):
        # ... (snippet truncated on the source page)
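The means/covariances/weights built above describe a two-component Gaussian mixture: the fitted training distribution plus a standard-normal noise component weighted by lw. As a sketch of what that setup amounts to, drawing samples from such a mixture with plain NumPy could look like this; the sample_mixture helper and the placeholder 2-D parameters are illustrative assumptions standing in for (mean, cov) and (meanN, covN) above.

import numpy as np

def sample_mixture(means, covariances, weights, n, seed=0):
    # Draw n samples from a Gaussian mixture given per-component means,
    # covariance matrices, and mixing weights.
    rng = np.random.default_rng(seed)
    # Pick a component for each sample according to the mixing weights,
    # then draw from that component's multivariate normal.
    components = rng.choice(len(weights), size=n, p=weights)
    return np.array([
        rng.multivariate_normal(means[k], covariances[k]) for k in components
    ])

# Placeholder parameters standing in for (mean, cov) and (meanN, covN) above.
mean, cov = np.array([1.0, -1.0]), np.array([[1.0, 0.2], [0.2, 1.0]])
meanN, covN = np.zeros(2), np.identity(2)
lw = 0.1  # noise-component weight; the snippet reads it from sys.argv[2]
samples = sample_mixture([mean, meanN], [cov, covN], [1 - lw, lw], n=500)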