How to use the aif360.algorithms.inprocessing.celisMeta.utils module in aif360

To help you get started, we've selected a few aif360 examples based on popular ways the module is used in public projects; the excerpts below all come from the IBM/AIF360 repository itself.

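Across these excerpts, three helpers from the utils module do the work: getDistribution fits a distribution to the training features, labels, and protected attribute; getRandomSamples draws points from that fit; and getProbability evaluates the fitted density at a feature vector with a label and a protected-attribute value appended. Below is a minimal sketch of that call pattern; the import alias and the toy arrays are assumptions for illustration, not code from the repository.

# Minimal sketch of the utils call pattern (assumed import alias and toy data).
import numpy as np
from aif360.algorithms.inprocessing.celisMeta import utils as ut

# Toy training data: features, labels in {+1, -1}, protected attribute in {0, 1}.
x_train = np.random.randn(100, 5)
y_train = np.random.choice([-1, 1], size=100)
x_control_train = np.random.choice([0, 1], size=100)

# Fit the distribution over (x, y, z) and draw samples from the training fit,
# mirroring General.getUnbiasedModel below.
dist_params, dist_params_train = ut.getDistribution(x_train, y_train, x_control_train)
samples = ut.getRandomSamples(dist_params_train)

# Evaluate the fitted density at one point: append a label (+1 or -1) and a
# protected-attribute value (1 or 0) to the feature vector, as in getValueForX.
x = x_train[0]
prob_1_1 = ut.getProbability(dist_params, np.append(np.append(x, 1), 1))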

IBM / AIF360 — aif360/algorithms/inprocessing/celisMeta/FalseDiscovery.py (view on GitHub)
def getValueForX(self, dist_params, a, b, params, samples, z_0, z_1, x, flag):
    # Constraint parameters for the false-discovery-rate formulation.
    u_1, u_2, l_1, l_2 = params[0], params[1], params[2], params[3]
    a, b = a[0], b[0]

    # Evaluate the fitted density at (x, y, z) for each combination of
    # label y in {+1, -1} and protected attribute z in {1, 0}.
    temp = np.append(np.append(x, 1), 1)
    prob_1_1 = ut.getProbability(dist_params, temp)

    temp = np.append(np.append(x, -1), 1)
    prob_m1_1 = ut.getProbability(dist_params, temp)

    temp = np.append(np.append(x, 1), 0)
    prob_1_0 = ut.getProbability(dist_params, temp)

    temp = np.append(np.append(x, -1), 0)
    prob_m1_0 = ut.getProbability(dist_params, temp)

    if (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1) == 0:
        print("Probability is 0.\n")
        return 0

    # Normalize into conditionals: P(y = +1 | x), P(z = 0 | x), P(z = 1 | x).
    prob_y_1 = (prob_1_1 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)

    prob_z_0 = (prob_m1_0 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)
    prob_z_1 = (prob_m1_1 + prob_1_1) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)
IBM / AIF360 — aif360/algorithms/inprocessing/celisMeta/StatisticalRate.py (view on GitHub)
def getValueForX(self, dist_params, a, b, params, samples, z_0, z_1, x, flag):
    a, b = a[0], b[0]
    # Constraint parameters for the statistical-rate formulation.
    l_1, l_2 = params[0], params[1]

    # Evaluate the fitted density at (x, y, z) for each combination of
    # label y in {+1, -1} and protected attribute z in {1, 0}.
    temp = np.append(np.append(x, 1), 1)
    prob_1_1 = ut.getProbability(dist_params, temp)

    temp = np.append(np.append(x, -1), 1)
    prob_m1_1 = ut.getProbability(dist_params, temp)

    temp = np.append(np.append(x, 1), 0)
    prob_1_0 = ut.getProbability(dist_params, temp)

    temp = np.append(np.append(x, -1), 0)
    prob_m1_0 = ut.getProbability(dist_params, temp)

    if (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1) == 0:
        return 0

    # Normalize into conditionals: P(y = +1 | x), P(z = 0 | x), P(z = 1 | x).
    prob_y_1 = (prob_1_1 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)

    prob_z_0 = (prob_m1_0 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)
    prob_z_1 = (prob_m1_1 + prob_1_1) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)

    c_0 = prob_y_1 - 0.5
    c_1 = prob_z_0 / z_0
IBM / AIF360 — aif360/algorithms/inprocessing/celisMeta/General.py (view on GitHub)
def getUnbiasedModel(self, x_train, y_train, x_control_train):
    # Fit the distribution over (features, label, protected attribute)
    # and draw random samples from the training fit.
    dist_params, dist_params_train = ut.getDistribution(x_train, y_train, x_control_train)
    eps = 0.01
    # Empirical base rates of the protected attribute.
    z_1 = sum(x_control_train) / float(len(x_control_train))
    z_0 = 1 - z_1
    p, q = [0, 0], [0, 0]
    params = [0] * self.getNumOfParams()
    samples = ut.getRandomSamples(dist_params_train)

    # With all constraint parameters zeroed, the returned callable is the
    # unconstrained baseline score built on getValueForX.
    def model(x):
        return self.getValueForX(dist_params, p, q, params, samples, z_0, z_1, x, 0)

    return model
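getUnbiasedModel wires these helpers together and returns a plain Python callable. A hedged usage sketch follows, assuming FalseDiscovery (from the file above) subclasses General, inherits getUnbiasedModel, and can be constructed without arguments; verify those assumptions against your AIF360 version.

import numpy as np
from aif360.algorithms.inprocessing.celisMeta.FalseDiscovery import FalseDiscovery  # assumed import path

# Toy training data with assumed shapes: features, labels in {+1, -1},
# protected attribute in {0, 1}.
x_train = np.random.randn(100, 5)
y_train = np.random.choice([-1, 1], size=100)
x_control_train = np.random.choice([0, 1], size=100)

fd = FalseDiscovery()  # assumption: no constructor arguments
model = fd.getUnbiasedModel(x_train, y_train, x_control_train)
score = model(x_train[0])  # score one point with the unconstrained baseline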
IBM / AIF360 — aif360/algorithms/inprocessing/celisMeta/StatisticalRate.py (view on GitHub)
def getExpectedGrad(self, dist_params, params, samples, mu, z_0, z_1, a, b):
    a, b = a[0], b[0]
    l_1, l_2 = params[0], params[1]
    res1 = []
    res2 = []
    # Estimate the expected gradient over the random samples.
    for x in samples:
        temp = np.append(np.append(x, 1), 1)
        prob_1_1 = ut.getProbability(dist_params, temp)

        temp = np.append(np.append(x, -1), 1)
        prob_m1_1 = ut.getProbability(dist_params, temp)

        temp = np.append(np.append(x, 1), 0)
        prob_1_0 = ut.getProbability(dist_params, temp)

        temp = np.append(np.append(x, -1), 0)
        prob_m1_0 = ut.getProbability(dist_params, temp)

        # Same density evaluations and normalization as in getValueForX.
        prob_y_1 = (prob_1_1 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)

        prob_z_0 = (prob_m1_0 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)
        prob_z_1 = (prob_m1_1 + prob_1_1) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)

        c_0 = prob_y_1 - 0.5
        c_1 = prob_z_0 / z_0
IBM / AIF360 — aif360/algorithms/inprocessing/celisMeta/FalseDiscovery.py (view on GitHub)
def getExpectedGrad(self, dist_params, params, samples, mu, z_0, z_1, a, b):
    u_1, u_2, l_1, l_2 = params[0], params[1], params[2], params[3]
    a, b = a[0], b[0]
    res1 = []
    res2 = []
    res3 = []
    res4 = []
    # Estimate the expected gradient over the random samples.
    for x in samples:
        temp = np.append(np.append(x, 1), 1)
        prob_1_1 = ut.getProbability(dist_params, temp)

        temp = np.append(np.append(x, -1), 1)
        prob_m1_1 = ut.getProbability(dist_params, temp)

        temp = np.append(np.append(x, 1), 0)
        prob_1_0 = ut.getProbability(dist_params, temp)

        temp = np.append(np.append(x, -1), 0)
        prob_m1_0 = ut.getProbability(dist_params, temp)

        # Same density evaluations and normalization as in getValueForX.
        prob_y_1 = (prob_1_1 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)

        prob_z_0 = (prob_m1_0 + prob_1_0) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)
        prob_z_1 = (prob_m1_1 + prob_1_1) / (prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1)
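Both getExpectedGrad excerpts, like the getValueForX excerpts above, turn the four density evaluations into conditional probabilities by dividing by their sum. A small worked example with made-up numbers makes the arithmetic concrete:

# Made-up density values for one sample x, named prob_{y}_{z}.
prob_1_1, prob_m1_1, prob_1_0, prob_m1_0 = 0.30, 0.10, 0.20, 0.40
total = prob_1_1 + prob_1_0 + prob_m1_0 + prob_m1_1   # 1.00

prob_y_1 = (prob_1_1 + prob_1_0) / total   # P(y = +1 | x) = 0.50
prob_z_0 = (prob_m1_0 + prob_1_0) / total  # P(z = 0 | x)  = 0.60
prob_z_1 = (prob_m1_1 + prob_1_1) / total  # P(z = 1 | x)  = 0.40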