# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_no_params(self):
    """Fitting PCA without privacy parameters must raise a PrivacyLeakWarning."""
    samples = np.array([0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
                        2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50])
    samples = samples.reshape(-1, 1)
    model = PCA()
    with self.assertWarns(PrivacyLeakWarning):
        model.fit(samples)
def test_no_params(self):
    """Fitting LinearRegression without privacy parameters must raise a PrivacyLeakWarning."""
    features = np.array([0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
                         2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50])
    labels = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
    features = features.reshape(-1, 1)
    model = LinearRegression()
    with self.assertWarns(PrivacyLeakWarning):
        model.fit(features, labels)
def test_simple(self):
    """KMeans should place a centre near each of three well-separated 1-D clusters."""
    global_seed(3141592653)
    # 334 points at 0.1, 333 at 0.5, 333 at 0.9 (later slices overwrite earlier ones).
    data = np.zeros(1000) + 0.1
    data[:666] = 0.5
    data[:333] = 0.9
    model = KMeans(5, [(0, 1)], 3)
    model.fit(data.reshape(-1, 1))
    centres = model.cluster_centers_
    # At least one learned centre must lie close to each true cluster location.
    for target in (0.1, 0.5, 0.9):
        self.assertTrue(np.isclose(centres, target, atol=0.05).any())
def test_trinomial(self):
    """LogisticRegression must fit a three-class (trinomial) problem without error."""
    features = np.array([[0.50], [0.75], [1.00]])
    labels = np.array([0, 1, 2])
    model = LogisticRegression(data_norm=1.0)
    self.assertIsNotNone(model.fit(features, labels))
def test_no_params(self):
    """Fitting LogisticRegression without privacy parameters must raise a PrivacyLeakWarning."""
    features = np.array([0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
                         2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50])
    labels = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
    features = features.reshape(-1, 1)
    model = LogisticRegression()
    with self.assertWarns(PrivacyLeakWarning):
        model.fit(features, labels)
def test_same_results(self):
    """With epsilon = inf the DP model must match sklearn's predictions exactly."""
    from sklearn import datasets
    from sklearn import linear_model
    from sklearn.model_selection import train_test_split

    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.2)

    # Infinite epsilon disables the privacy noise entirely.
    dp_model = LogisticRegression(data_norm=12, epsilon=float("inf"))
    dp_model.fit(X_train, y_train)
    dp_predictions = dp_model.predict(X_test)

    sk_model = linear_model.LogisticRegression(solver="lbfgs", multi_class="ovr")
    sk_model.fit(X_train, y_train)
    sk_predictions = sk_model.predict(X_test)

    self.assertTrue(np.all(dp_predictions == sk_predictions))
def test_sample_weight_warning(self):
    """Passing sample_weight (unsupported in DP fit) must raise DiffprivlibCompatibilityWarning."""
    features = np.array([0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
                         2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50])
    labels = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
    features = features.reshape(-1, 1)
    model = LogisticRegression(data_norm=5.5)
    with self.assertWarns(DiffprivlibCompatibilityWarning):
        model.fit(features, labels, sample_weight=np.ones_like(labels))
def test_different_results(self):
    """With finite epsilon, repeated DP fits should differ from each other and from sklearn."""
    from sklearn import datasets
    from sklearn import linear_model
    from sklearn.model_selection import train_test_split

    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(iris.data, iris.target, test_size=0.2)

    # Two independent DP fits on identical data; the noise should make them disagree.
    dp_predictions = []
    for _ in range(2):
        dp_model = LogisticRegression(data_norm=12)
        dp_model.fit(X_train, y_train)
        dp_predictions.append(dp_model.predict(X_test))
    predict1, predict2 = dp_predictions

    sk_model = linear_model.LogisticRegression(solver="lbfgs", multi_class="ovr")
    sk_model.fit(X_train, y_train)
    predict3 = sk_model.predict(X_test)

    self.assertFalse(np.all(predict1 == predict2))
    # The non-private model must not agree with both noisy models simultaneously.
    self.assertFalse(np.all(predict3 == predict1) and np.all(predict3 == predict2))
def test_simple(self):
    """A DP LogisticRegression fit on normalised data should separate the extremes."""
    features = np.array([0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
                         2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50])
    labels = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
    # Centre and scale the inputs so they fit inside the declared data_norm of 1.0.
    features = (features[:, np.newaxis] - 3.0) / 2.5
    model = LogisticRegression(epsilon=2, data_norm=1.0)
    model.fit(features, labels)
    self.assertIsNotNone(model)
    # Smallest original value maps to class 0, largest to class 1.
    self.assertFalse(model.predict(np.array([(0.5 - 3) / 2.5]).reshape(-1, 1)))
    self.assertTrue(model.predict(np.array([(5.5 - 3) / 2.5]).reshape(-1, 1)))
def test_large_norm(self):
    """Data whose norm exceeds the declared data_norm must raise a PrivacyLeakWarning."""
    features = np.array([0.50, 0.75, 1.00, 1.25, 1.50, 1.75, 1.75, 2.00, 2.25, 2.50,
                         2.75, 3.00, 3.25, 3.50, 4.00, 4.25, 4.50, 4.75, 5.00, 5.50])
    labels = np.array([0, 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1])
    features = features.reshape(-1, 1)
    # Declared norm (1.0) is deliberately smaller than the true data norm (5.5).
    model = LogisticRegression(data_norm=1.0)
    with self.assertWarns(PrivacyLeakWarning):
        model.fit(features, labels)