Skip to content

Commit ae2ccbf

Browse files
authored
MAINT fix FutureWarning raised by scikit-learn in examples (scikit-learn#27442)
1 parent 59d5236 commit ae2ccbf

File tree

5 files changed

+17
-8
lines changed

5 files changed

+17
-8
lines changed

doc/conf.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -701,7 +701,6 @@ def setup(app):
701701
),
702702
)
703703

704-
705704
# maps functions with a class name that is indistinguishable when case is
706705
# ignore to another filename
707706
autosummary_filename_map = {

examples/cluster/plot_face_compress.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,11 @@
7777

7878
n_bins = 8
7979
encoder = KBinsDiscretizer(
80-
n_bins=n_bins, encode="ordinal", strategy="uniform", random_state=0
80+
n_bins=n_bins,
81+
encode="ordinal",
82+
strategy="uniform",
83+
random_state=0,
84+
subsample=200_000,
8185
)
8286
compressed_raccoon_uniform = encoder.fit_transform(raccoon_face.reshape(-1, 1)).reshape(
8387
raccoon_face.shape
@@ -122,7 +126,11 @@
122126
# find a more optimal mapping.
123127

124128
encoder = KBinsDiscretizer(
125-
n_bins=n_bins, encode="ordinal", strategy="kmeans", random_state=0
129+
n_bins=n_bins,
130+
encode="ordinal",
131+
strategy="kmeans",
132+
random_state=0,
133+
subsample=200_000,
126134
)
127135
compressed_raccoon_kmeans = encoder.fit_transform(raccoon_face.reshape(-1, 1)).reshape(
128136
raccoon_face.shape

examples/linear_model/plot_ard.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -58,8 +58,8 @@
5858
from sklearn.linear_model import ARDRegression, BayesianRidge, LinearRegression
5959

6060
olr = LinearRegression().fit(X, y)
61-
brr = BayesianRidge(compute_score=True, n_iter=30).fit(X, y)
62-
ard = ARDRegression(compute_score=True, n_iter=30).fit(X, y)
61+
brr = BayesianRidge(compute_score=True, max_iter=30).fit(X, y)
62+
ard = ARDRegression(compute_score=True, max_iter=30).fit(X, y)
6363
df = pd.DataFrame(
6464
{
6565
"Weights of true generative process": true_weights,
@@ -117,7 +117,7 @@
117117

118118
# %%
119119
# Indeed, both models minimize the log-likelihood up to an arbitrary cutoff
120-
# defined by the `n_iter` parameter.
120+
# defined by the `max_iter` parameter.
121121
#
122122
# Bayesian regressions with polynomial feature expansion
123123
# ======================================================

examples/preprocessing/plot_discretization_strategies.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,9 @@
7676
i += 1
7777
# transform the dataset with KBinsDiscretizer
7878
for strategy in strategies:
79-
enc = KBinsDiscretizer(n_bins=4, encode="ordinal", strategy=strategy)
79+
enc = KBinsDiscretizer(
80+
n_bins=4, encode="ordinal", strategy=strategy, subsample=200_000
81+
)
8082
enc.fit(X)
8183
grid_encoded = enc.transform(grid)
8284

examples/release_highlights/plot_release_highlights_0_22_0.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@
8989
X, y = load_iris(return_X_y=True)
9090
estimators = [
9191
("rf", RandomForestClassifier(n_estimators=10, random_state=42)),
92-
("svr", make_pipeline(StandardScaler(), LinearSVC(random_state=42))),
92+
("svr", make_pipeline(StandardScaler(), LinearSVC(dual="auto", random_state=42))),
9393
]
9494
clf = StackingClassifier(estimators=estimators, final_estimator=LogisticRegression())
9595
X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y, random_state=42)

0 commit comments

Comments (0)