Skip to content

Commit

Permalink
Merge pull request #153 from JuliaAI/dev
Browse files Browse the repository at this point in the history
For a 0.6.13 release
  • Loading branch information
ablaom authored Sep 23, 2021
2 parents a47019b + d9ce57f commit afcd719
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 28 deletions.
2 changes: 1 addition & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "MLJTuning"
uuid = "03970b2e-30c4-11ea-3135-d1576263f10f"
authors = ["Anthony D. Blaom <[email protected]>"]
version = "0.6.12"
version = "0.6.13"

[deps]
ComputationalResources = "ed09eef8-17a6-5b46-8889-db040fac31e3"
Expand Down
36 changes: 10 additions & 26 deletions src/learning_curves.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
measure=default_measure(machine.model),
rows=nothing,
weights=nothing,
operation=predict,
operation=nothing,
range=nothing,
acceleration=default_resource(),
acceleration_grid=CPU1(),
Expand All @@ -26,8 +26,8 @@ the (possibly nested) RNG field, and a vector `rngs` of RNG's, one for
each curve. Alternatively, set `rngs` to the number of curves desired,
in which case RNG's are automatically generated. The individual curve
computations can be distributed across multiple processes using
`acceleration=CPUProcesses()` or `acceleration=CPUThreads()`. See the
second example below for a demonstration.
```julia
X, y = @load_boston;
Expand Down Expand Up @@ -73,27 +73,6 @@ a machine.
- `resolution` - number of points generated from `range` (number model
evaluations); default is `30`
- `resampling` - resampling strategy; default is `Holdout(fraction_train=0.7)`
- `repeats` - set to more than `1` for repeated (Monte Carlo) resampling
- `measure` - performance measure (metric); automatically inferred
from model by default when possible
- `rows` - row indices to which resampling should be restricted;
default is all rows
- `weights` - sample weights used by `measure` where supported
- `operation` - operation, such as `predict`, to be used in
  evaluations. If `prediction_type(mach.model) == :probabilistic` but
  `prediction_type(measure) == :deterministic` consider `predict_mean`,
  `predict_mode` or `predict_median`; default is `predict`.
- `range` - object constructed using `range(model, ...)` or
`range(type, ...)` representing one-dimensional hyper-parameter
range.
- `acceleration` - parallelization option for passing to `evaluate!`;
an instance of `CPU1`, `CPUProcesses` or `CPUThreads` from the
`ComputationalResources.jl`; default is `default_resource()`
Expand All @@ -107,13 +86,18 @@ a machine.
- `rng_name` - name of the model hyper-parameter representing a random
number generator (see above); possibly nested
Other key-word options are documented at [`TunedModel`](@ref).
"""
# Machine form of `learning_curve`: unpack the machine into its model and
# training arguments and delegate to the model-and-data method, forwarding
# all keyword options unchanged.
function learning_curve(mach::Machine{<:Supervised}; kwargs...)
    return learning_curve(mach.model, mach.args...; kwargs...)
end

# for backwards compatibility
#
# Deprecated alias for `learning_curve`. Emits a one-time depwarn and
# forwards the call; keyword options are passed through unchanged.
function learning_curve!(mach::Machine{<:Supervised}; kwargs...)
    # `Base.depwarn(msg, funcsym)` takes the deprecated function's name as a
    # Symbol; pass it literally rather than digging it out of internals via
    # `Core.Typeof(learning_curve!).name.mt.name`, which relies on
    # undocumented fields and can break across Julia versions.
    Base.depwarn("`learning_curve!` is deprecated, use `learning_curve` instead. ",
                 :learning_curve!)
    return learning_curve(mach; kwargs...)
end

function learning_curve(model::Supervised, args...;
resolution=30,
Expand All @@ -122,7 +106,7 @@ function learning_curve(model::Supervised, args...;
measures=nothing,
measure=measures,
rows=nothing,
operation=predict,
operation=nothing,
ranges::Union{Nothing,ParamRange}=nothing,
range::Union{Nothing,ParamRange},
repeats=1,
Expand Down
2 changes: 1 addition & 1 deletion src/tuned_models.jl
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ hyper-parameters are to be mutated.
tuned_model = TunedModel(; models=<models to be compared>,
resampling=Holdout(),
measure=nothing,
n=default_n(tuning, range),
n=length(models),
operation=nothing,
other_options...)
Expand Down
10 changes: 10 additions & 0 deletions test/learning_curves.jl
Original file line number Diff line number Diff line change
Expand Up @@ -206,6 +206,16 @@ end

end

# Verify the backwards-compatibility shim: calling `learning_curve!` must
# emit a deprecation warning (checked by `@test_deprecated`) while still
# running the underlying `learning_curve` computation.
# NOTE(review): relies on fixtures `X`, `y`, `KNNRegressor`, `LPLoss`
# defined earlier in this test file — assumed in scope here.
@testset "deprecation of learning_curve!" begin
    atom = KNNRegressor()
    mach = machine(atom, X, y)
    # one-dimensional hyper-parameter range over the K neighbours parameter
    r = range(atom, :K, lower=1, upper=2)
    @test_deprecated learning_curve!(mach;
                                     range=r,
                                     measure=LPLoss(),
                                     verbosity=0)

end

end # module
true

0 comments on commit afcd719

Please sign in to comment.