Skip to content

Commit 4da44c8

Browse files
amueller authored and NelleV committed
[MRG+1] replaced some assert_true(np.allclose(x, y)) with assert_almost_equal (scikit-learn#7742)
* replaced some assert_true(np.allclose(x, y)) with assert_almost_equal for better error messages. also some pep8. * typo fixes
1 parent 3f4524e commit 4da44c8

File tree

4 files changed

+34
-33
lines changed

4 files changed

+34
-33
lines changed

sklearn/decomposition/tests/test_nmf.py

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def test_initialize_variants():
6868
random_state=0)
6969

7070
for ref, evl in ((W0, Wa), (W0, War), (H0, Ha), (H0, Har)):
71-
assert_true(np.allclose(evl[ref != 0], ref[ref != 0]))
71+
assert_almost_equal(evl[ref != 0], ref[ref != 0])
7272

7373

7474
@ignore_warnings
@@ -128,9 +128,10 @@ def test_nmf_transform_custom_init():
128128
H_init = np.abs(avg * random_state.randn(n_components, 5))
129129
W_init = np.abs(avg * random_state.randn(6, n_components))
130130

131-
m = NMF(solver='cd', n_components=n_components, init='custom', random_state=0)
132-
ft = m.fit_transform(A, W=W_init, H=H_init)
133-
t = m.transform(A)
131+
m = NMF(solver='cd', n_components=n_components, init='custom',
132+
random_state=0)
133+
m.fit_transform(A, W=W_init, H=H_init)
134+
m.transform(A)
134135

135136

136137
@ignore_warnings
@@ -140,7 +141,7 @@ def test_nmf_inverse_transform():
140141
A = np.abs(random_state.randn(6, 4))
141142
for solver in ('pg', 'cd'):
142143
m = NMF(solver=solver, n_components=4, init='random', random_state=0)
143-
ft = m.fit_transform(A)
144+
m.fit_transform(A)
144145
t = m.transform(A)
145146
A_new = m.inverse_transform(t)
146147
assert_array_almost_equal(A, A_new, decimal=2)
@@ -235,9 +236,11 @@ def test_non_negative_factorization_checking():
235236
# Test parameters checking is public function
236237
nnmf = non_negative_factorization
237238
assert_no_warnings(nnmf, A, A, A, np.int64(1))
238-
msg = "Number of components must be a positive integer; got (n_components=1.5)"
239+
msg = ("Number of components must be a positive integer; "
240+
"got (n_components=1.5)")
239241
assert_raise_message(ValueError, msg, nnmf, A, A, A, 1.5)
240-
msg = "Number of components must be a positive integer; got (n_components='2')"
242+
msg = ("Number of components must be a positive integer; "
243+
"got (n_components='2')")
241244
assert_raise_message(ValueError, msg, nnmf, A, A, A, '2')
242245
msg = "Negative values in data passed to NMF (input H)"
243246
assert_raise_message(ValueError, msg, nnmf, A, A, -A, 2, 'custom')

sklearn/gaussian_process/tests/test_gpr.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,8 +41,8 @@ def test_gpr_interpolation():
4141
gpr = GaussianProcessRegressor(kernel=kernel).fit(X, y)
4242
y_pred, y_cov = gpr.predict(X, return_cov=True)
4343

44-
assert_true(np.allclose(y_pred, y))
45-
assert_true(np.allclose(np.diag(y_cov), 0.))
44+
assert_almost_equal(y_pred, y)
45+
assert_almost_equal(np.diag(y_cov), 0.)
4646

4747

4848
def test_lml_improving():

sklearn/preprocessing/tests/test_imputation.py

Lines changed: 20 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -6,14 +6,13 @@
66
from sklearn.utils.testing import assert_array_equal
77
from sklearn.utils.testing import assert_raises
88
from sklearn.utils.testing import assert_false
9-
from sklearn.utils.testing import assert_true
109

1110
from sklearn.preprocessing.imputation import Imputer
1211
from sklearn.pipeline import Pipeline
1312
from sklearn.model_selection import GridSearchCV
1413
from sklearn import tree
1514
from sklearn.random_projection import sparse_random_matrix
16-
15+
1716

1817
def _check_statistics(X, X_true,
1918
strategy, statistics, missing_values):
@@ -92,16 +91,16 @@ def test_imputation_mean_median_only_zero():
9291
# Test imputation using the mean and median strategies, when
9392
# missing_values == 0.
9493
X = np.array([
95-
[np.nan, 0, 0, 0, 5],
96-
[np.nan, 1, 0, np.nan, 3],
97-
[np.nan, 2, 0, 0, 0],
98-
[np.nan, 6, 0, 5, 13],
94+
[np.nan, 0, 0, 0, 5],
95+
[np.nan, 1, 0, np.nan, 3],
96+
[np.nan, 2, 0, 0, 0],
97+
[np.nan, 6, 0, 5, 13],
9998
])
10099

101100
X_imputed_mean = np.array([
102-
[3, 5],
103-
[1, 3],
104-
[2, 7],
101+
[3, 5],
102+
[1, 3],
103+
[2, 7],
105104
[6, 13],
106105
])
107106
statistics_mean = [np.nan, 3, np.nan, np.nan, 7]
@@ -144,7 +143,7 @@ def test_imputation_mean_median():
144143
shape = (dim * dim, dim + dec)
145144

146145
zeros = np.zeros(shape[0])
147-
values = np.arange(1, shape[0]+1)
146+
values = np.arange(1, shape[0] + 1)
148147
values[4::2] = - values[4::2]
149148

150149
tests = [("mean", "NaN", lambda z, v, p: safe_mean(np.hstack((z, v)))),
@@ -236,17 +235,17 @@ def test_imputation_median_special_cases():
236235
def test_imputation_most_frequent():
237236
# Test imputation using the most-frequent strategy.
238237
X = np.array([
239-
[-1, -1, 0, 5],
240-
[-1, 2, -1, 3],
241-
[-1, 1, 3, -1],
242-
[-1, 2, 3, 7],
238+
[-1, -1, 0, 5],
239+
[-1, 2, -1, 3],
240+
[-1, 1, 3, -1],
241+
[-1, 2, 3, 7],
243242
])
244243

245244
X_true = np.array([
246-
[2, 0, 5],
247-
[2, 3, 3],
248-
[1, 3, 3],
249-
[2, 3, 7],
245+
[2, 0, 5],
246+
[2, 3, 3],
247+
[1, 3, 3],
248+
[2, 3, 7],
250249
])
251250

252251
# scipy.stats.mode, used in Imputer, doesn't return the first most
@@ -315,23 +314,23 @@ def test_imputation_copy():
315314
imputer = Imputer(missing_values=0, strategy="mean", copy=False)
316315
Xt = imputer.fit(X).transform(X)
317316
Xt[0, 0] = -1
318-
assert_true(np.all(X == Xt))
317+
assert_array_equal(X, Xt)
319318

320319
# copy=False, sparse csr, axis=1 => no copy
321320
X = X_orig.copy()
322321
imputer = Imputer(missing_values=X.data[0], strategy="mean",
323322
copy=False, axis=1)
324323
Xt = imputer.fit(X).transform(X)
325324
Xt.data[0] = -1
326-
assert_true(np.all(X.data == Xt.data))
325+
assert_array_equal(X.data, Xt.data)
327326

328327
# copy=False, sparse csc, axis=0 => no copy
329328
X = X_orig.copy().tocsc()
330329
imputer = Imputer(missing_values=X.data[0], strategy="mean",
331330
copy=False, axis=0)
332331
Xt = imputer.fit(X).transform(X)
333332
Xt.data[0] = -1
334-
assert_true(np.all(X.data == Xt.data))
333+
assert_array_equal(X.data, Xt.data)
335334

336335
# copy=False, sparse csr, axis=0 => copy
337336
X = X_orig.copy()

sklearn/utils/tests/test_extmath.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717
from sklearn.utils.testing import assert_false
1818
from sklearn.utils.testing import assert_greater
1919
from sklearn.utils.testing import assert_raises
20-
from sklearn.utils.testing import assert_raise_message
2120
from sklearn.utils.testing import assert_warns
2221
from sklearn.utils.testing import skip_if_32bit
2322
from sklearn.utils.testing import SkipTest
@@ -65,8 +64,8 @@ def test_uniform_weights():
6564
mode, score = stats.mode(x, axis)
6665
mode2, score2 = weighted_mode(x, weights, axis)
6766

68-
assert_true(np.all(mode == mode2))
69-
assert_true(np.all(score == score2))
67+
assert_array_equal(mode, mode2)
68+
assert_array_equal(score, score2)
7069

7170

7271
def test_random_weights():

0 commit comments

Comments (0)