
Commit 8cc0552

Merge pull request #31 from JuliaAI/dev
For a 0.2.4 release
2 parents 5fac426 + 1970915 commit 8cc0552

5 files changed: +19 −20 lines changed

.github/workflows/ci.yml

Lines changed: 1 addition & 1 deletion

@@ -18,7 +18,7 @@ jobs:
       fail-fast: false
       matrix:
         version:
-          - '1.6'
+          - '1.10'
           - '1' # automatically expands to the latest stable 1.x release of Julia.
         os:
           - ubuntu-latest
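
The CI matrix now exercises Julia 1.10, the new minimum supported release (matching the `[compat]` bump in Project.toml below), alongside the latest stable 1.x. Purely as an illustrative aside, and not part of this commit, the same floor could be asserted from Julia code in a local sanity check:

```julia
# Illustrative only: fail fast when running on a Julia older than the
# floor that CI and Project.toml now assume.
@assert VERSION >= v"1.10" "FeatureSelection 0.2.4 requires Julia 1.10 or newer"
```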

Project.toml

Lines changed: 3 additions & 3 deletions

@@ -1,7 +1,7 @@
 name = "FeatureSelection"
 uuid = "33837fe5-dbff-4c9e-8c2f-c5612fe2b8b6"
 authors = ["Anthony D. Blaom <[email protected]>", "Samuel Okon <[email protected]>"]
-version = "0.2.3"
+version = "0.2.4"

 [deps]
 MLJModelInterface = "e80e1ace-859a-464e-9ed9-23947d8ae3ea"
@@ -11,15 +11,15 @@ Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
 [compat]
 Aqua = "0.8"
 Distributions = "0.25"
-julia = "1.6"
+julia = "1.10"
 MLJBase = "1.4"
 MLJTuning = "0.8"
 MLJDecisionTreeInterface = "0.4"
 MLJScikitLearnInterface = "0.6"
 MLJModelInterface = "1.10"
 ScientificTypesBase = "3"
 StableRNGs = "1"
-StatisticalMeasures = "0.1, 0.2"
+StatisticalMeasures = "0.1, 0.2, 0.3"
 Tables = "1.2"
 Test = "1.6"
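
A quick local check that the widened `[compat]` bounds still resolve is to re-resolve the package environment with Pkg. A minimal sketch, assuming the repository is checked out in the current directory; the path and whichever versions the resolver picks are not part of this commit:

```julia
using Pkg

# Activate the package's own project (path is illustrative).
Pkg.activate(".")

# Re-resolve dependencies; this errors if the new bounds
# (julia = "1.10", StatisticalMeasures = "0.1, 0.2, 0.3") cannot be satisfied.
Pkg.resolve()

# Confirm which StatisticalMeasures version the resolver actually picked.
Pkg.status("StatisticalMeasures")
```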

docs/src/index.md

Lines changed: 3 additions & 4 deletions

@@ -104,11 +104,10 @@ We can view the important features used by our model by inspecting the `fitted_params`
 object.
 ```jldoctest
 julia> p = fitted_params(mach)
-(features_left = [:x4, :x2, :x1, :x5, :x3],
- model_fitresult = (forest = Ensemble of Decision Trees
+(features_left = [:x4, :x2, :x1, :x5, :x3], model_fitresult = (forest = Ensemble of Decision Trees
 Trees: 100
 Avg Leaves: 25.3
-Avg Depth: 8.01,),)
+Avg Depth: 8.01,))

 julia> p.features_left
 5-element Vector{Symbol}:
@@ -180,4 +179,4 @@ For resampling methods different from cross-validation, and for other
 [MLJ Documentation](https://juliaai.github.io/MLJ.jl/dev/)
 ```@meta
 DocTestSetup = nothing
-```
+```
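
The reformatted doctest output shows how a fitted `RecursiveFeatureElimination` machine prints its `fitted_params`. A minimal sketch of the same inspection, assuming MLJDecisionTreeInterface is installed and using synthetic data rather than the dataset from the documentation:

```julia
using MLJ, FeatureSelection

# Synthetic regression data; column names default to :x1, :x2, ...
X = MLJ.table(rand(100, 10))
y = rand(100)

# Wrap a random forest (as in the documentation example) in RFE,
# keeping the five most important features.
RandomForestRegressor = @load RandomForestRegressor pkg=DecisionTree
rfe = RecursiveFeatureElimination(model=RandomForestRegressor(), n_features=5)

mach = machine(rfe, X, y)
fit!(mach, verbosity=0)

# `fitted_params` exposes the surviving features and the wrapped model's
# own fitresult, as shown in the doctest above.
p = fitted_params(mach)
p.features_left          # a 5-element Vector{Symbol}
p.model_fitresult.forest # the underlying ensemble of decision trees
```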

test/models/featureselector.jl

Lines changed: 7 additions & 7 deletions

@@ -12,21 +12,21 @@
 # Test feature selection with `features=Symbol[]`
 namesX = MLJBase.schema(X).names |> collect
 selector = FeatureSelector()
-f, = MLJBase.fit(selector, 1, X)
+f, = MLJBase.fit(selector, 0, X)
 @test f == namesX
 Xt = MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2))
 @test Set(MLJBase.schema(Xt).names) == Set(namesX)
 @test length(Xt.Zn) == 2

 # Test on selecting features if `features` keyword is defined
 selector = FeatureSelector(features=[:Zn, :Crim])
-f, = MLJBase.fit(selector, 1, X)
+f, = MLJBase.fit(selector, 0, X)
 @test MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2)) ==
     MLJBase.select(X, 1:2, [:Zn, :Crim])

 # test on ignoring a feature, even if it's listed in the `features`
 selector.ignore = true
-f, = MLJBase.fit(selector, 1, X)
+f, = MLJBase.fit(selector, 0, X)
 Xnew = MLJBase.transform(selector, f, X)
 @test MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2)) ==
     MLJBase.select(X, 1:2, [:x3, :x4])
@@ -35,7 +35,7 @@
 selector = FeatureSelector(features=[:x1, :mickey_mouse])
 @test_throws(
     ArgumentError,
-    MLJBase.fit(selector, 1, X)
+    MLJBase.fit(selector, 0, X)
 )
 selector.ignore = true
 @test_logs(
@@ -50,13 +50,13 @@
 selector = FeatureSelector(features= x-> x == (:x1))
 @test_throws(
     ArgumentError,
-    MLJBase.fit(selector, 1, X)
+    MLJBase.fit(selector, 0, X)
 )
 selector.ignore = true
 selector.features = x-> x in [:Zn, :Crim, :x3, :x4]
 @test_throws(
     ArgumentError,
-    MLJBase.fit(selector, 1, X)
+    MLJBase.fit(selector, 0, X)
 )

 # Test model Metadata
@@ -67,4 +67,4 @@ end
 # To be added with FeatureSelectorRule X = (n1=["a", "b", "a"], n2=["g", "g", "g"], n3=[7, 8, 9],
 # n4 =UInt8[3,5,10], o1=[4.5, 3.6, 4.0], )
 # MLJBase.schema(X)
-# Xc = coerce(X, :n1=>Multiclass, :n2=>Multiclass)
+# Xc = coerce(X, :n1=>Multiclass, :n2=>Multiclass)
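
The only functional change in these tests is the second positional argument of the low-level `MLJBase.fit(model, verbosity, data...)` call, which is the verbosity level; passing 0 keeps the test run quiet. A self-contained sketch of the same calls on a toy table (the data here is illustrative, not the test fixture):

```julia
using MLJBase, FeatureSelection

# Toy table with column names in the spirit of the test data.
X = (Zn=rand(4), Crim=rand(4), x3=rand(4), x4=rand(4))

selector = FeatureSelector(features=[:Zn, :Crim])

# The second positional argument is verbosity; 0 (as in the updated tests)
# suppresses fitting output.
f, = MLJBase.fit(selector, 0, X)

# Keep only the selected columns of the first two rows.
MLJBase.transform(selector, f, MLJBase.selectrows(X, 1:2))
```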

test/models/rfe.jl

Lines changed: 5 additions & 5 deletions

@@ -62,9 +62,9 @@ const DTM = DummyTestModels
 selector_mach3 = machine(selector3, Xt, y)
 selector_mach4 = machine(selector4, Xt, y)

-fit!(selector_mach)
-fit!(selector_mach2)
-fit!(selector_mach3)
+fit!(selector_mach, verbosity=0)
+fit!(selector_mach2, verbosity=0)
+fit!(selector_mach3, verbosity=0)
 @test_logs(
     (:warn, "n_features > number of features in training data, hence no feature will be eliminated."),
     match_mode=:any,
@@ -149,7 +149,7 @@ end
 svm = SVR(kernel="linear")
 rfe = RecursiveFeatureElimination(model=svm, n_features=5)
 mach = machine(rfe, Xs, ys)
-fit!(mach)
+fit!(mach, verbosity=0)

 rfecv = RecursiveFeatureElimination(model=svm)
 tuning_rfe_model = TunedModel(
@@ -160,7 +160,7 @@ end
     range=range(rfecv, :n_features, values=1:10)
 )
 self_tuning_rfe_mach = machine(tuning_rfe_model, Xs, ys)
-fit!(self_tuning_rfe_mach)
+fit!(self_tuning_rfe_mach, verbosity=0)

 # Compare results
 # Convert MLJ RFE scores to rankings
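
At the machine level the same silencing is done through the `verbosity` keyword of `fit!`. A small sketch, using `FeatureSelector` only so the example needs no extra model dependencies (the tests themselves fit `RecursiveFeatureElimination` around an SVR):

```julia
using MLJBase, FeatureSelection

X = (x1=rand(10), x2=rand(10), x3=rand(10))

mach = machine(FeatureSelector(features=[:x1, :x2]), X)

# verbosity=0 suppresses the "Training machine ..." log lines,
# which is what the updated test calls rely on.
fit!(mach, verbosity=0)

transform(mach, X)   # table containing only :x1 and :x2
```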
