Commit

Merge pull request #532 from JuliaAI/dev
For a 0.16.12 release
ablaom authored Oct 6, 2023
2 parents c9f8feb + a985207 commit 961c13a
Showing 8 changed files with 436 additions and 19 deletions.
5 changes: 5 additions & 0 deletions .github/codecov.yml
@@ -0,0 +1,5 @@
+coverage:
+  status:
+    project:
+      default:
+        threshold: 0.5%
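
(For reference: under codecov's project status check, a threshold of 0.5% allows total coverage to drop by up to roughly half a percentage point relative to the base commit before the status is reported as failing.)
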
2 changes: 1 addition & 1 deletion Project.toml
@@ -1,7 +1,7 @@
 name = "MLJModels"
 uuid = "d491faf4-2d78-11e9-2867-c94bc002c0b7"
 authors = ["Anthony D. Blaom <[email protected]>"]
-version = "0.16.11"
+version = "0.16.12"

 [deps]
 CategoricalArrays = "324d7699-5711-5eae-9e2f-1d82baa6b597"
436 changes: 422 additions & 14 deletions src/registry/Metadata.toml

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions src/registry/Models.toml
@@ -10,6 +10,8 @@ ParallelKMeans = ["KMeans"]
 NaiveBayes = ["GaussianNBClassifier", "MultinomialNBClassifier"]
 MultivariateStats = ["LDA", "MultitargetLinearRegressor", "BayesianSubspaceLDA", "FactorAnalysis", "LinearRegressor", "ICA", "PPCA", "RidgeRegressor", "KernelPCA", "MultitargetRidgeRegressor", "SubspaceLDA", "BayesianLDA", "PCA"]
 DecisionTree = ["AdaBoostStumpClassifier", "DecisionTreeRegressor", "DecisionTreeClassifier", "RandomForestRegressor", "RandomForestClassifier"]
+MLJBalancing = ["BalancedBaggingClassifier"]
+Imbalance = ["RandomOversampler", "SMOTENC", "TomekUndersampler", "ClusterUndersampler", "SMOTE", "SMOTEN", "ROSE", "RandomUndersampler", "ENNUndersampler", "BorderlineSMOTE1", "RandomWalkOversampler"]
 Clustering = ["HierarchicalClustering", "DBSCAN", "KMeans", "KMedoids"]
 EvoLinear = ["EvoSplineRegressor", "EvoLinearRegressor"]
 XGBoost = ["XGBoostCount", "XGBoostRegressor", "XGBoostClassifier"]
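
With these registry entries in place, the new models become discoverable through the usual MLJModels query tools. A minimal sketch of what that looks like (model names taken from the entries above; no interface packages need to be loaded just to query the registry metadata):

    using MLJModels

    # list registry entries contributed by the two newly registered packages
    newcomers = filter(m -> m.package_name in ("Imbalance", "MLJBalancing"), models())

    # inspect the registered metadata for one of them
    info("BalancedBaggingClassifier", pkg="MLJBalancing")
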
2 changes: 2 additions & 0 deletions src/registry/Project.toml
@@ -3,8 +3,10 @@ BetaML = "024491cd-cc6b-443e-8034-08ea7eb7db2b"
 CatBoost = "e2e10f9a-a85d-4fa9-b6b2-639a32100a12"
 EvoLinear = "ab853011-1780-437f-b4b5-5de6f4777246"
 EvoTrees = "f6006082-12f8-11e9-0c9c-0d5d367ab1e5"
+Imbalance = "c709b415-507b-45b7-9a3d-1767c89fde68"
 InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
 LightGBM = "7acf609c-83a4-11e9-1ffb-b912bcd3b04a"
+MLJBalancing = "45f359ea-796d-4f51-95a5-deb1a414c586"
 MLJBase = "a7f614a8-145f-11e9-1d2a-a57a1082229d"
 MLJClusteringInterface = "d354fa79-ed1c-40d4-88ef-b8c7bd1568af"
 MLJDecisionTreeInterface = "c6f25543-311c-4c74-83dc-3ea6d1015661"
2 changes: 1 addition & 1 deletion src/registry/src/Registry.jl
@@ -1,4 +1,4 @@
-module Registry
+module Registry

 using Pkg
 import Pkg.TOML
2 changes: 1 addition & 1 deletion src/registry/src/update.jl
@@ -52,7 +52,7 @@ To register all the models in GreatNewPackage with MLJ:
 - Quit your REPL session and make a trivial commit to your MLJModels
 branch to force pre-compilation in a new julia session when you run
 `using MLJModels`. (For technical reasons the registry is not loaded
-in `__init__`()`, so without pre-compiliation the new ]registry is not
+in `__init__()`, so without pre-compiliation the new ]registry is not
 available.)

 - Test that the interfaces load properly with
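
The truncated step above concerns checking that a registered interface actually loads. Purely as an illustration (not necessarily the exact command the docstring goes on to name), one such per-model check using the public loading API, assuming the relevant interface package is installed in the environment:

    using MLJModels

    # an error here would point to a problem with the new registration
    Oversampler = @load RandomOversampler pkg=Imbalance verbosity=0
    oversampler = Oversampler()
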
4 changes: 2 additions & 2 deletions test/builtins/ThresholdPredictors.jl
@@ -6,7 +6,6 @@ using CategoricalDistributions
 import Distributions
 import MLJBase

-
 const MMI = MLJModels.MLJModelInterface

 X_ = NamedTuple{(:x1,:x2,:x3)}((rand(4), rand(4), rand(4)))
@@ -163,9 +162,10 @@ MMI.input_scitype(::Type{<:DummyDetector}) = MMI.Table
 @test MMI.predict(detector, fr, X) == fill("in", length(y_))

 # integration (y == ["in", "out", "out", "out"]):
+accuracy(yhat, y) = sum(yhat .== y)/length(y)
 e = MLJBase.evaluate(detector, X_, y_,
 resampling=MLJBase.Holdout(fraction_train=0.5),
-measure=MLJBase.accuracy)
+measure=accuracy)
 @test e.measurement[1] ≈ 0
 end

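
For orientation: the locally defined accuracy above is just the proportion of exact matches, and with the default (unshuffled) holdout the evaluation fold is the last two observations of y_ (both "out"), while the dummy detector predicts "in" everywhere, so the expected score is zero. A quick sanity check with hypothetical toy values (not part of the test suite):

    accuracy(yhat, y) = sum(yhat .== y)/length(y)

    accuracy(["in", "in"], ["out", "out"])               # 0.0, as in the test
    accuracy(["in", "out", "out"], ["in", "out", "in"])  # ≈ 0.67
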
