Skip to content

Commit

Permalink
scikit-learn v1.4 support
Browse files Browse the repository at this point in the history
  • Loading branch information
tylerjthomas9 committed Jan 20, 2024
1 parent 06f7add commit 01244d3
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 21 deletions.
2 changes: 1 addition & 1 deletion CondaPkg.toml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@

[deps.scikit-learn]
channel = "conda-forge"
version = ">=1.2, <1.4"
version = ">=1.4, <1.5"
7 changes: 2 additions & 5 deletions src/models/clustering.jl
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,7 @@ meta(AffinityPropagation,
const AgglomerativeClustering_ = skcl(:AgglomerativeClustering)
@sk_uns mutable struct AgglomerativeClustering <: MMI.Unsupervised
n_clusters::Int = 2::(_ ≥ 1)
# replace `affinity` parameter with `metric` when scikit learn releases v1.4
affinity::String = "euclidean"::(_ in ("euclidean", "l1", "l2", "manhattan", "cosine", "precomputed"))
#metric::Any = nothing::(_ isa Union{Nothing, Function} || _ in ("euclidean", "l1", "l2", "manhattan", "cosine", "precomputed"))
metric::String = "euclidean"::(_ in ("euclidean", "l1", "l2", "manhattan", "cosine", "precomputed"))
memory::Any = nothing
connectivity::Any = nothing
compute_full_tree::Union{String,Bool} = "auto"::(_ isa Bool || _ == "auto")
Expand Down Expand Up @@ -187,8 +185,7 @@ const FeatureAgglomeration_ = skcl(:FeatureAgglomeration)
connectivity::Any = nothing
# XXX unclear how to pass a proper callable here; just passing mean = nok
# pooling_func::Function = mean
# replace `affinity` parameter with `metric` when scikit learn releases v1.4
affinity::Any = "euclidean"::(_ isa Function || _ in ("euclidean", "l1", "l2", "manhattan", "cosine", "precomputed"))
metric::Any = "euclidean"::(_ isa Function || _ in ("euclidean", "l1", "l2", "manhattan", "cosine", "precomputed"))
compute_full_tree::Union{String,Bool} = "auto"::(_ isa Bool || _ == "auto")
linkage::String = "ward"::(_ in ("ward", "complete", "average", "single"))
distance_threshold::Option{Float64} = nothing
Expand Down
12 changes: 10 additions & 2 deletions src/models/ensemble.jl
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,9 @@ const ExtraTreesRegressor_ = sken(:ExtraTreesRegressor)
random_state::Any = nothing
verbose::Int = 0
warm_start::Bool = false
ccp_alpha::Float64 = 0.0
max_samples::Option{Union{Int64,Float64,Nothing}} = nothing
monotonic_cst::Option{Union{Vector, Dict}} = nothing
end
@sk_feature_importances ExtraTreesRegressor
MMI.fitted_params(m::ExtraTreesRegressor, (f, _, _)) = (
Expand Down Expand Up @@ -216,6 +219,9 @@ const ExtraTreesClassifier_ = sken(:ExtraTreesClassifier)
verbose::Int = 0
warm_start::Bool = false
class_weight::Any = nothing
ccp_alpha::Float64 = 0.0
max_samples::Option{Union{Int64,Float64,Nothing}} = nothing
monotonic_cst::Option{Union{Vector, Dict}} = nothing
end
@sk_feature_importances ExtraTreesClassifier
MMI.fitted_params(m::ExtraTreesClassifier, (f, _, _)) = (
Expand Down Expand Up @@ -367,6 +373,7 @@ const RandomForestRegressor_ = sken(:RandomForestRegressor)
ccp_alpha::Float64 =0.0::(_ ≥ 0)
max_samples::Union{Nothing,Float64,Int} =
nothing::(_ === nothing || (_ ≥ 0 && (_ isa Integer || _ ≤ 1)))
monotonic_cst::Option{Union{Vector, Dict}} = nothing
end
@sk_feature_importances RandomForestRegressor
MMI.fitted_params(model::RandomForestRegressor, (f, _, _)) = (
Expand Down Expand Up @@ -418,6 +425,7 @@ const RandomForestClassifier_ = sken(:RandomForestClassifier)
ccp_alpha::Float64 =0.0::(_ ≥ 0)
max_samples::Union{Nothing,Float64,Int} =
nothing::(_ === nothing || (_ ≥ 0 && (_ isa Integer || _ ≤ 1)))
monotonic_cst::Option{Union{Vector, Dict}} = nothing
end
@sk_feature_importances RandomForestClassifier
MMI.fitted_params(m::RandomForestClassifier, (f, _, _)) = (
Expand Down Expand Up @@ -463,7 +471,7 @@ const HistGradientBoostingRegressor_ = sken(:HistGradientBoostingRegressor)
max_bins::Int = 255
categorical_features::Option{Vector} = nothing
monotonic_cst::Option{Union{Vector, Dict}} = nothing
# interaction_cst
interaction_cst::Any = nothing
warm_start::Bool = false
early_stopping::Union{String, Bool} = "auto"::(_ in ("auto", true, false))
scoring::String = "loss"
Expand Down Expand Up @@ -507,7 +515,7 @@ const HistGradientBoostingClassifier_ = sken(:HistGradientBoostingClassifier)
max_bins::Int = 255
categorical_features::Option{Vector} = nothing
monotonic_cst::Option{Union{Vector, Dict}} = nothing
# interaction_cst
interaction_cst::Any = nothing
warm_start::Bool = false
early_stopping::Union{String, Bool} = "auto"::(_ in ("auto",) || _ isa Bool)
scoring::String = "loss"
Expand Down
13 changes: 0 additions & 13 deletions src/models/linear-regressors.jl
Original file line number Diff line number Diff line change
Expand Up @@ -124,8 +124,6 @@ const LarsRegressor_ = sklm(:Lars)
@sk_reg mutable struct LarsRegressor <: MMI.Deterministic
fit_intercept::Bool = true
verbose::Union{Bool,Int} = false
# TODO Remove this when python ScikitLearn releases v1.4
normalize::Bool = false
precompute::Union{Bool,String,AbstractMatrix} = "auto"
n_nonzero_coefs::Int = 500::(_ > 0)
eps::Float64 = eps(Float64)::(_ > 0)
Expand All @@ -148,8 +146,6 @@ const LarsCVRegressor_ = sklm(:LarsCV)
fit_intercept::Bool = true
verbose::Union{Bool,Int} = false
max_iter::Int = 500::(_ > 0)
# TODO Remove this when python ScikitLearn releases v1.4
normalize::Bool = false
precompute::Union{Bool,String,AbstractMatrix} = "auto"
cv::Any = 5
max_n_alphas::Int = 1_000::(_ > 0)
Expand Down Expand Up @@ -224,8 +220,6 @@ const LassoLarsRegressor_ = sklm(:LassoLars)
alpha::Float64 = 1.0::(_ ≥ 0) # 0 should be OLS
fit_intercept::Bool = true
verbose::Union{Bool, Int} = false
# TODO Remove this when python ScikitLearn releases v1.4
normalize::Bool = false
precompute::Union{Bool,String,AbstractMatrix} = "auto"
max_iter::Int = 500::(_ > 0)
eps::Float64 = eps(Float64)::(_ > 0)
Expand All @@ -249,8 +243,6 @@ const LassoLarsCVRegressor_ = sklm(:LassoLarsCV)
fit_intercept::Bool = true
verbose::Union{Bool, Int} = false
max_iter::Int = 500::(_ > 0)
# TODO Remove this when python ScikitLearn releases v1.4
normalize::Bool = false
precompute::Union{Bool,String,AbstractMatrix} = "auto"
cv::Any = 5
max_n_alphas::Int = 1_000::(_ > 0)
Expand Down Expand Up @@ -278,8 +270,6 @@ const LassoLarsICRegressor_ = sklm(:LassoLarsIC)
criterion::String = "aic"::(_ in ("aic","bic"))
fit_intercept::Bool = true
verbose::Union{Bool, Int} = false
# TODO Remove this when python ScikitLearn releases v1.4
normalize::Bool = false
precompute::Union{Bool,String,AbstractMatrix} = "auto"
max_iter::Int = 500::(_ > 0)
eps::Float64 = eps(Float64)::(_ > 0.0)
Expand Down Expand Up @@ -315,7 +305,6 @@ const OrthogonalMatchingPursuitRegressor_ = sklm(:OrthogonalMatchingPursuit)
n_nonzero_coefs::Option{Int} = nothing
tol::Option{Float64} = nothing
fit_intercept::Bool = true
normalize::Bool = false
precompute::Union{Bool,String,AbstractMatrix} = "auto"
end
MMI.fitted_params(model::OrthogonalMatchingPursuitRegressor, (fitresult, _, _)) = (
Expand All @@ -329,8 +318,6 @@ const OrthogonalMatchingPursuitCVRegressor_ = sklm(:OrthogonalMatchingPursuitCV)
@sk_reg mutable struct OrthogonalMatchingPursuitCVRegressor <: MMI.Deterministic
copy::Bool = true
fit_intercept::Bool = true
# TODO Remove this when python ScikitLearn releases v1.4
normalize::Bool = false
max_iter::Option{Int} = nothing::(_ === nothing||_ > 0)
cv::Any = 5
n_jobs::Option{Int} = 1
Expand Down

0 comments on commit 01244d3

Please sign in to comment.