
Commit afcd719

Merge pull request #153 from JuliaAI/dev
For a 0.6.13 release
2 parents a47019b + d9ce57f commit afcd719

4 files changed: 22 additions, 28 deletions

Project.toml

Lines changed: 1 addition & 1 deletion
@@ -1,7 +1,7 @@
 name = "MLJTuning"
 uuid = "03970b2e-30c4-11ea-3135-d1576263f10f"
 authors = ["Anthony D. Blaom <anthony.blaom@gmail.com>"]
-version = "0.6.12"
+version = "0.6.13"
 
 [deps]
 ComputationalResources = "ed09eef8-17a6-5b46-8889-db040fac31e3"

src/learning_curves.jl

Lines changed: 10 additions & 26 deletions
@@ -7,7 +7,7 @@
         measure=default_measure(machine.model),
         rows=nothing,
         weights=nothing,
-        operation=predict,
+        operation=nothing,
         range=nothing,
         acceleration=default_resource(),
         acceleration_grid=CPU1(),
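With the default changed from `predict` to `nothing`, the evaluation operation is no longer fixed up front but left for the resampling machinery to choose from the model and measure. A minimal sketch of the use case this enables, assuming MLJ with the NearestNeighborModels interface installed and that the automatic choice resolves to something like `predict_mode` for a probabilistic classifier scored with a deterministic measure:

```julia
using MLJ  # assumes MLJ, MLJTuning and NearestNeighborModels are installed

X, y = @load_iris
KNNClassifier = @load KNNClassifier pkg=NearestNeighborModels verbosity=0
clf = KNNClassifier()
mach = machine(clf, X, y)
r = range(clf, :K, lower=1, upper=10)

# `operation` is left at its new default of `nothing`. The classifier is
# probabilistic while `misclassification_rate` is deterministic, so an
# appropriate operation (e.g. `predict_mode`) has to be picked automatically:
curve = learning_curve(mach; range=r, measure=misclassification_rate, verbosity=0)
curve.parameter_values, curve.measurements
```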
@@ -26,8 +26,8 @@ the (possibly nested) RNG field, and a vector `rngs` of RNG's, one for
 each curve. Alternatively, set `rngs` to the number of curves desired,
 in which case RNG's are automatically generated. The individual curve
 computations can be distributed across multiple processes using
-`acceleration=CPUProcesses()` or `acceleration=CPUThreads()`. See the second example below for a
-demonstration.
+`acceleration=CPUProcesses()` or `acceleration=CPUThreads()`. See the
+second example below for a demonstration.
 
 ```julia
 X, y = @load_boston;
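The re-wrapped sentence above concerns generating one curve per RNG and parallelizing the per-curve work. A rough sketch of that usage, assuming the DecisionTree interface is installed and using `CPUThreads()` so no worker processes need to be added (the field names `n_trees` and `rng` are those of `RandomForestRegressor` from the DecisionTree package):

```julia
using MLJ  # assumes MLJ, MLJTuning and the DecisionTree MLJ interface are installed

X, y = @load_boston
RandomForestRegressor = @load RandomForestRegressor pkg=DecisionTree verbosity=0
forest = RandomForestRegressor()
mach = machine(forest, X, y)
r = range(forest, :n_trees, lower=10, upper=100)

# Four curves, one per automatically generated RNG (seeding the model's
# `rng` field), with the per-curve computations multi-threaded:
curves = learning_curve(mach;
                        range=r,
                        measure=rms,
                        rngs=4,
                        rng_name=:rng,
                        acceleration=CPUThreads(),
                        verbosity=0)
```

With `rngs` set, `curves.measurements` should come back with one column per curve, ready for plotting.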
@@ -73,27 +73,6 @@ a machine.
 - `resolution` - number of points generated from `range` (number model
   evaluations); default is `30`
 
-- `resampling` - resampling strategy; default is `Holdout(fraction_train=0.7)`
-
-- `repeats` - set to more than `1` for repeated (Monte Carlo) resampling
-
-- `measure` - performance measure (metric); automatically inferred
-  from model by default when possible
-
-- `rows` - row indices to which resampling should be restricted;
-  default is all rows
-
-- `weights` - sample weights used by `measure` where supported
-
-- `operation` - operation, such as `predict`, to be used in
-  evaluations. If `prediction_type(mach.model) == :probabilistic` but
-  `prediction_type(measure) == :deterministic` consider `predict_mode`
-  or `predict_median`; default is `predict`.
-
-- `range` - object constructed using `range(model, ...)` or
-  `range(type, ...)` representing one-dimensional hyper-parameter
-  range.
-
 - `acceleration` - parallelization option for passing to `evaluate!`;
   an instance of `CPU1`, `CPUProcesses` or `CPUThreads` from the
   `ComputationalResources.jl`; default is `default_resource()`
@@ -107,13 +86,18 @@ a machine.
 - `rng_name` - name of the model hyper-parameter representing a random
   number generator (see above); possibly nested
 
+Other key-word options are documented at [`TunedModel`](@ref).
+
 """
 learning_curve(mach::Machine{<:Supervised}; kwargs...) =
     learning_curve(mach.model, mach.args...; kwargs...)
 
 # for backwards compatibility
-learning_curve!(mach::Machine{<:Supervised}; kwargs...) =
+function learning_curve!(mach::Machine{<:Supervised}; kwargs...)
+    Base.depwarn("`learning_curve!` is deprecated, use `learning_curve` instead. ",
+                 Core.Typeof(learning_curve!).name.mt.name)
     learning_curve(mach; kwargs...)
+end
 
 function learning_curve(model::Supervised, args...;
                         resolution=30,
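The replacement body follows the usual Julia deprecation recipe: warn via `Base.depwarn`, then forward to the new name (the committed code recovers the function's symbol programmatically via `Core.Typeof(learning_curve!).name.mt.name` rather than hard-coding it). A stripped-down sketch of the same pattern using hypothetical names:

```julia
# Hypothetical old/new API pair illustrating the deprecation shim above:
new_api(x) = 2x

function old_api(x)
    # Second argument is the symbol identifying the deprecated function:
    Base.depwarn("`old_api` is deprecated, use `new_api` instead.", :old_api)
    new_api(x)
end

old_api(3)  # returns 6; the warning prints when Julia runs with `--depwarn=yes`
```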
@@ -122,7 +106,7 @@ function learning_curve(model::Supervised, args...;
                         measures=nothing,
                         measure=measures,
                         rows=nothing,
-                        operation=predict,
+                        operation=nothing,
                         ranges::Union{Nothing,ParamRange}=nothing,
                         range::Union{Nothing,ParamRange},
                         repeats=1,

src/tuned_models.jl

Lines changed: 1 addition & 1 deletion
@@ -79,7 +79,7 @@ hyper-parameters are to be mutated.
     tuned_model = TunedModel(; models=<models to be compared>,
                              resampling=Holdout(),
                              measure=nothing,
-                             n=default_n(tuning, range),
+                             n=length(models),
                              operation=nothing,
                              other_options...)
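The corrected default reflects what happens when a `TunedModel` wraps an explicit list of models: each candidate is evaluated once, so `n` defaults to `length(models)` rather than the generic `default_n(tuning, range)`. A minimal sketch, assuming the DecisionTree and NearestNeighborModels interfaces are installed and that supplying `models` selects the explicit-comparison strategy as the docstring describes:

```julia
using MLJ  # assumes MLJ, MLJTuning, DecisionTree and NearestNeighborModels are installed

X, y = @load_boston
DecisionTreeRegressor = @load DecisionTreeRegressor pkg=DecisionTree verbosity=0
KNNRegressor = @load KNNRegressor pkg=NearestNeighborModels verbosity=0

# Two explicit candidates; `n` is omitted, so it defaults to `length(models) == 2`
# and each model is resampled exactly once:
tuned = TunedModel(models=[DecisionTreeRegressor(), KNNRegressor()],
                   resampling=CV(nfolds=3),
                   measure=rms)

mach = machine(tuned, X, y)
fit!(mach, verbosity=0)
report(mach).best_model
```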

test/learning_curves.jl

Lines changed: 10 additions & 0 deletions
@@ -206,6 +206,16 @@ end
 
 end
 
+@testset "deprecation of learning_curve!" begin
+    atom = KNNRegressor()
+    mach = machine(atom, X, y)
+    r = range(atom, :K, lower=1, upper=2)
+    @test_deprecated learning_curve!(mach;
+                                     range=r,
+                                     measure=LPLoss(),
+                                     verbosity=0)
+
+end
 
 end # module
 true
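The new test set leans on `Test.@test_deprecated`, which checks that the wrapped call emits a deprecation warning when Julia runs with `--depwarn=yes` and otherwise simply evaluates the call. A tiny self-contained sketch with a hypothetical deprecated function:

```julia
using Test

# Hypothetical deprecated function standing in for `learning_curve!`:
old_name() = (Base.depwarn("`old_name` is deprecated.", :old_name); 42)

@testset "deprecation sketch" begin
    # Passes by verifying the deprecation warning under `--depwarn=yes`;
    # with deprecation warnings disabled it just returns the call's value.
    @test_deprecated old_name()
end
```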

0 commit comments
