@@ -37,17 +37,16 @@ is an estimator object::
37
37
>>> from sklearn.pipeline import Pipeline
38
38
>>> from sklearn.svm import SVC
39
39
>>> from sklearn.decomposition import PCA
40
- >>> estimators = [('reduce_dim', PCA()), ('svm', SVC())]
41
- >>> clf = Pipeline(estimators)
42
- >>> clf # doctest: +NORMALIZE_WHITESPACE
40
+ >>> estimators = [('reduce_dim', PCA()), ('clf', SVC())]
41
+ >>> pipe = Pipeline(estimators)
42
+ >>> pipe # doctest: +NORMALIZE_WHITESPACE
43
43
Pipeline(steps=[('reduce_dim', PCA(copy=True, iterated_power=4,
44
44
n_components=None, random_state=None, svd_solver='auto', tol=0.0,
45
- whiten=False)), ('svm', SVC(C=1.0, cache_size=200, class_weight=None,
45
+ whiten=False)), ('clf', SVC(C=1.0, cache_size=200, class_weight=None,
46
46
coef0=0.0, decision_function_shape=None, degree=3, gamma='auto',
47
47
kernel='rbf', max_iter=-1, probability=False, random_state=None,
48
48
shrinking=True, tol=0.001, verbose=False))])
49
49
50
-
51
50
The utility function :func:`make_pipeline` is a shorthand
52
51
for constructing pipelines;
53
52
it takes a variable number of estimators and returns a pipeline,
@@ -64,23 +63,23 @@ filling in the names automatically::
64
63
65
64
The estimators of a pipeline are stored as a list in the ``steps`` attribute::
66
65
67
- >>> clf.steps[0]
66
+ >>> pipe.steps[0]
68
67
('reduce_dim', PCA(copy=True, iterated_power=4, n_components=None, random_state=None,
69
68
svd_solver='auto', tol=0.0, whiten=False))
70
69
71
70
and as a ``dict`` in ``named_steps``::
72
71
73
- >>> clf.named_steps['reduce_dim']
72
+ >>> pipe.named_steps['reduce_dim']
74
73
PCA(copy=True, iterated_power=4, n_components=None, random_state=None,
75
74
svd_solver='auto', tol=0.0, whiten=False)
76
75
77
76
Parameters of the estimators in the pipeline can be accessed using the
78
77
``<estimator>__<parameter>`` syntax::
79
78
80
- >>> clf.set_params(svm__C=10)  # doctest: +NORMALIZE_WHITESPACE
79
+ >>> pipe.set_params(clf__C=10)  # doctest: +NORMALIZE_WHITESPACE
81
80
Pipeline(steps=[('reduce_dim', PCA(copy=True, iterated_power=4,
82
81
n_components=None, random_state=None, svd_solver='auto', tol=0.0,
83
- whiten=False)), ('svm', SVC(C=10, cache_size=200, class_weight=None,
82
+ whiten=False)), ('clf', SVC(C=10, cache_size=200, class_weight=None,
84
83
coef0=0.0, decision_function_shape=None, degree=3, gamma='auto',
85
84
kernel='rbf', max_iter=-1, probability=False, random_state=None,
86
85
shrinking=True, tol=0.001, verbose=False))])
@@ -90,9 +89,17 @@ This is particularly important for doing grid searches::
90
89
91
90
>>> from sklearn.model_selection import GridSearchCV
92
91
>>> params = dict(reduce_dim__n_components=[2, 5, 10],
93
- ... svm__C=[0.1, 10, 100])
94
- >>> grid_search = GridSearchCV(clf, param_grid=params)
92
+ ... clf__C=[0.1, 10, 100])
93
+ >>> grid_search = GridSearchCV(pipe, param_grid=params)
94
+
95
+ Individual steps may also be replaced as parameters, and non-final steps may be
96
+ ignored by setting them to ``None``::
95
97
98
+ >>> from sklearn.linear_model import LogisticRegression
99
+ >>> params = dict(reduce_dim=[None, PCA(5), PCA(10)],
100
+ ... clf=[SVC(), LogisticRegression()],
101
+ ... clf__C=[0.1, 10, 100])
102
+ >>> grid_search = GridSearchCV(pipe, param_grid=params)
96
103
97
104
.. topic:: Examples:
98
105
@@ -172,6 +179,15 @@ Like pipelines, feature unions have a shorthand constructor called
172
179
:func:`make_union` that does not require explicit naming of the components.
173
180
174
181
182
+ Like ``Pipeline``, individual steps may be replaced using ``set_params``,
183
+ and ignored by setting to ``None``::
184
+
185
+ >>> combined.set_params(kernel_pca=None) # doctest: +NORMALIZE_WHITESPACE
186
+ FeatureUnion(n_jobs=1, transformer_list=[('linear_pca', PCA(copy=True,
187
+ iterated_power=4, n_components=None, random_state=None,
188
+ svd_solver='auto', tol=0.0, whiten=False)), ('kernel_pca', None)],
189
+ transformer_weights=None)
190
+
175
191
.. topic:: Examples:
176
192
177
193
* :ref:`sphx_glr_auto_examples_feature_stacker.py`
0 commit comments