Commit 51ca717

DOC fix random_state in example for reproducibility cont'd (scikit-learn#27238)
Co-authored-by: Guillaume Lemaitre <g.lemaitre58@gmail.com>
1 parent 872c19e commit 51ca717

File tree

3 files changed, 16 insertions(+), 8 deletions(-)
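For context (not part of this commit): a minimal sketch, using a hypothetical toy dataset, of what pinning random_state buys. With the seed fixed, two fits of a stochastic estimator such as SpectralEmbedding should produce identical coordinates, which is what makes the rendered example plots reproducible across documentation builds.

# Illustrative sketch only; the S-curve data and parameter values are assumptions.
import numpy as np
from sklearn import datasets, manifold

X, _ = datasets.make_s_curve(500, random_state=0)

emb_a = manifold.SpectralEmbedding(n_components=2, n_neighbors=10, random_state=42)
emb_b = manifold.SpectralEmbedding(n_components=2, n_neighbors=10, random_state=42)

# With the same seed, both fits should yield the same embedding.
print(np.allclose(emb_a.fit_transform(X), emb_b.fit_transform(X)))  # expected: True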


examples/manifold/plot_compare_methods.py

Lines changed: 1 addition & 1 deletion
@@ -182,7 +182,7 @@ def add_2d_scatter(ax, points, points_color, title=None):
 # Read more in the :ref:`User Guide <spectral_embedding>`.

 spectral = manifold.SpectralEmbedding(
-    n_components=n_components, n_neighbors=n_neighbors
+    n_components=n_components, n_neighbors=n_neighbors, random_state=42
 )
 S_spectral = spectral.fit_transform(S_points)

examples/manifold/plot_manifold_sphere.py

Lines changed: 5 additions & 3 deletions
@@ -78,7 +78,7 @@
 t0 = time()
 trans_data = (
     manifold.LocallyLinearEmbedding(
-        n_neighbors=n_neighbors, n_components=2, method=method
+        n_neighbors=n_neighbors, n_components=2, method=method, random_state=42
     )
     .fit_transform(sphere_data)
     .T
@@ -112,7 +112,7 @@

 # Perform Multi-dimensional scaling.
 t0 = time()
-mds = manifold.MDS(2, max_iter=100, n_init=1, normalized_stress="auto")
+mds = manifold.MDS(2, max_iter=100, n_init=1, normalized_stress="auto", random_state=42)
 trans_data = mds.fit_transform(sphere_data).T
 t1 = time()
 print("MDS: %.2g sec" % (t1 - t0))
@@ -126,7 +126,9 @@

 # Perform Spectral Embedding.
 t0 = time()
-se = manifold.SpectralEmbedding(n_components=2, n_neighbors=n_neighbors)
+se = manifold.SpectralEmbedding(
+    n_components=2, n_neighbors=n_neighbors, random_state=42
+)
 trans_data = se.fit_transform(sphere_data).T
 t1 = time()
 print("Spectral Embedding: %.2g sec" % (t1 - t0))
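Aside (not part of this commit): MDS starts SMACOF from a random configuration, so with n_init=1 an unseeded run can settle in a different local minimum on each execution. A minimal sketch of the effect of the seed, on assumed toy data:

# Illustrative sketch only; the dataset and parameter values are assumptions.
import numpy as np
from sklearn import datasets, manifold

X, _ = datasets.make_s_curve(300, random_state=0)

mds = manifold.MDS(2, max_iter=100, n_init=1, normalized_stress="auto", random_state=42)
stress_a = mds.fit(X).stress_
stress_b = mds.fit(X).stress_

# Each fit re-seeds from random_state=42, so the stress values should match.
print(np.isclose(stress_a, stress_b))  # expected: True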

examples/miscellaneous/plot_kernel_approximation.py

Lines changed: 10 additions & 4 deletions
@@ -72,18 +72,24 @@

 # Create a classifier: a support vector classifier
 kernel_svm = svm.SVC(gamma=0.2)
-linear_svm = svm.LinearSVC(dual="auto")
+linear_svm = svm.LinearSVC(dual="auto", random_state=42)

 # create pipeline from kernel approximation
 # and linear svm
 feature_map_fourier = RBFSampler(gamma=0.2, random_state=1)
 feature_map_nystroem = Nystroem(gamma=0.2, random_state=1)
 fourier_approx_svm = pipeline.Pipeline(
-    [("feature_map", feature_map_fourier), ("svm", svm.LinearSVC(dual="auto"))]
+    [
+        ("feature_map", feature_map_fourier),
+        ("svm", svm.LinearSVC(dual="auto", random_state=42)),
+    ]
 )

 nystroem_approx_svm = pipeline.Pipeline(
-    [("feature_map", feature_map_nystroem), ("svm", svm.LinearSVC(dual="auto"))]
+    [
+        ("feature_map", feature_map_nystroem),
+        ("svm", svm.LinearSVC(dual="auto", random_state=42)),
+    ]
 )

 # fit and predict using linear and kernel svm:
@@ -192,7 +198,7 @@

 # visualize the decision surface, projected down to the first
 # two principal components of the dataset
-pca = PCA(n_components=8).fit(data_train)
+pca = PCA(n_components=8, random_state=42).fit(data_train)

 X = pca.transform(data_train)
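Aside (not part of this commit): LinearSVC also consumes random_state (it shuffles the data when the dual solver is used), so seeding it makes the pipeline fits in this example repeatable. A minimal sketch on assumed synthetic data:

# Illustrative sketch only; the classification data is an assumption.
import numpy as np
from sklearn import pipeline, svm
from sklearn.datasets import make_classification
from sklearn.kernel_approximation import RBFSampler

X, y = make_classification(n_samples=200, random_state=0)

def make_clf():
    return pipeline.Pipeline(
        [
            ("feature_map", RBFSampler(gamma=0.2, random_state=1)),
            ("svm", svm.LinearSVC(dual="auto", random_state=42)),
        ]
    )

# Two independently built, seeded pipelines should learn identical coefficients.
coef_a = make_clf().fit(X, y).named_steps["svm"].coef_
coef_b = make_clf().fit(X, y).named_steps["svm"].coef_
print(np.allclose(coef_a, coef_b))  # expected: True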
