From ba65ee3e228da12c64152ea75cca1d92c88285ac Mon Sep 17 00:00:00 2001
From: PrimozGodec
Date: Thu, 21 Sep 2023 15:23:03 +0200
Subject: [PATCH 1/4] Gradient Boosting - Remove try/catch around imports

---
 Orange/classification/__init__.py          | 10 +--
 Orange/modelling/__init__.py               | 10 +--
 Orange/regression/__init__.py              | 10 +--
 Orange/widgets/model/owgradientboosting.py | 28 ++-------
 .../model/tests/test_owgradientboosting.py | 63 +++----------------
 5 files changed, 18 insertions(+), 103 deletions(-)

diff --git a/Orange/classification/__init__.py b/Orange/classification/__init__.py
index 982498d6f40..d120562137d 100644
--- a/Orange/classification/__init__.py
+++ b/Orange/classification/__init__.py
@@ -20,12 +20,6 @@
 from .sgd import *
 from .neural_network import *
 from .calibration import *
-try:
-    from .catgb import *
-except ModuleNotFoundError:
-    pass
+from .catgb import *
 from .gb import *
-try:
-    from .xgb import *
-except Exception:
-    pass
+from .xgb import *
diff --git a/Orange/modelling/__init__.py b/Orange/modelling/__init__.py
index 206151fcdf2..f5b952b1976 100644
--- a/Orange/modelling/__init__.py
+++ b/Orange/modelling/__init__.py
@@ -11,12 +11,6 @@
 from .randomforest import *
 from .svm import *
 from .tree import *
-try:
-    from .catgb import *
-except ImportError:
-    pass
+from .catgb import *
 from .gb import *
-try:
-    from .xgb import *
-except ImportError:
-    pass
+from .xgb import *
diff --git a/Orange/regression/__init__.py b/Orange/regression/__init__.py
index 62d24249f4e..be87ff5b42a 100644
--- a/Orange/regression/__init__.py
+++ b/Orange/regression/__init__.py
@@ -14,13 +14,7 @@
 from .tree import *
 from .neural_network import *
 from ..classification.simple_tree import *
-try:
-    from .catgb import *
-except ModuleNotFoundError:
-    pass
+from .catgb import *
 from .gb import *
-try:
-    from .xgb import *
-except Exception:
-    pass
+from .xgb import *
 from .curvefit import *
diff --git a/Orange/widgets/model/owgradientboosting.py b/Orange/widgets/model/owgradientboosting.py
index e99b99b4e21..cfa8f9ba915 100644
--- a/Orange/widgets/model/owgradientboosting.py
+++ b/Orange/widgets/model/owgradientboosting.py
@@ -9,16 +9,8 @@
 from Orange.base import Learner
 from Orange.data import Table
 from Orange.modelling import GBLearner
-
-try:
-    from Orange.modelling import CatGBLearner
-except ImportError:
-    CatGBLearner = None
-try:
-    from Orange.modelling import XGBLearner, XGBRFLearner
-except ImportError:
-    XGBLearner = XGBRFLearner = None
-
+from Orange.modelling import CatGBLearner
+from Orange.modelling import XGBLearner, XGBRFLearner
 from Orange.widgets import gui
 from Orange.widgets.settings import Setting, SettingProvider
 from Orange.widgets.utils.owlearnerwidget import OWBaseLearner
@@ -26,27 +18,17 @@
 
 
 class LearnerItemModel(QStandardItemModel):
-    LEARNERS = [
-        (GBLearner, "", ""),
-        (XGBLearner, "Extreme Gradient Boosting (xgboost)", "xgboost"),
-        (XGBRFLearner, "Extreme Gradient Boosting Random Forest (xgboost)",
-         "xgboost"),
-        (CatGBLearner, "Gradient Boosting (catboost)", "catboost"),
-    ]
+    LEARNERS = [GBLearner, XGBLearner, XGBRFLearner, CatGBLearner]
 
     def __init__(self, parent):
         super().__init__(parent)
         self._add_data()
 
     def _add_data(self):
-        for cls, opt_name, lib in self.LEARNERS:
+        for cls in self.LEARNERS:
             item = QStandardItem()
-            imported = bool(cls)
-            name = cls.name if imported else opt_name
+            name = cls.name
             item.setData(f"{name}", Qt.DisplayRole)
-            item.setEnabled(imported)
-            if not imported:
-                item.setToolTip(f"{lib} is not installed")
             self.appendRow(item)
 
diff --git a/Orange/widgets/model/tests/test_owgradientboosting.py b/Orange/widgets/model/tests/test_owgradientboosting.py
index a59de79be60..e18926e8e6d 100644
--- a/Orange/widgets/model/tests/test_owgradientboosting.py
+++ b/Orange/widgets/model/tests/test_owgradientboosting.py
@@ -1,32 +1,18 @@
 import json
 import unittest
-import sys
 from typing import Type
-from unittest.mock import patch, Mock
+from unittest.mock import Mock
 
 from Orange.classification import GBClassifier
-try:
-    from Orange.classification import XGBClassifier, XGBRFClassifier
-except ImportError:
-    XGBClassifier = XGBRFClassifier = None
-try:
-    from Orange.classification import CatGBClassifier
-except ImportError:
-    CatGBClassifier = None
+from Orange.classification import XGBClassifier, XGBRFClassifier
+from Orange.classification import CatGBClassifier
 
 from Orange.data import Table
-from Orange.modelling import GBLearner
 from Orange.preprocess.score import Scorer
 from Orange.regression import GBRegressor
-try:
-    from Orange.regression import XGBRegressor, XGBRFRegressor
-except ImportError:
-    XGBRegressor = XGBRFRegressor = None
-try:
-    from Orange.regression import CatGBRegressor
-except ImportError:
-    CatGBRegressor = None
+from Orange.regression import XGBRegressor, XGBRFRegressor
+from Orange.regression import CatGBRegressor
 
 from Orange.widgets.model.owgradientboosting import OWGradientBoosting, \
     LearnerItemModel, GBLearnerEditor, XGBLearnerEditor, XGBRFLearnerEditor, \
     CatGBLearnerEditor, BaseEditor
@@ -65,16 +51,6 @@ def test_model(self):
         self.assertEqual(model.item(i).isEnabled(),
                          classifiers[i] is not None)
-
-    @patch("Orange.widgets.model.owgradientboosting.LearnerItemModel.LEARNERS",
-           [(GBLearner, "", ""),
-            (None, "Gradient Boosting (catboost)", "catboost")])
-    def test_missing_lib(self):
-        widget = create_parent(CatGBLearnerEditor)
-        model = LearnerItemModel(widget)
-        self.assertEqual(model.rowCount(), 2)
-        self.assertTrue(model.item(0).isEnabled())
-        self.assertFalse(model.item(1).isEnabled())
 
 
 class BaseEditorTest(GuiTest):
     EditorClass: Type[BaseEditor] = None
@@ -146,7 +122,6 @@ def test_arguments(self):
                 "colsample_bynode": 1, "subsample": 1, "random_state": 0}
         self.assertDictEqual(self.editor.get_arguments(), args)
 
-    @unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
     def test_learner_parameters(self):
         params = (("Method", "Extreme Gradient Boosting (xgboost)"),
                   ("Number of trees", 100),
@@ -160,7 +135,6 @@ def test_learner_parameters(self):
                   ("Fraction of features for each split", 1))
         self.assertTupleEqual(self.editor.get_learner_parameters(), params)
 
-    @unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
     def test_default_parameters_cls(self):
         data = Table("heart_disease")
         booster = XGBClassifier()
@@ -178,7 +152,6 @@ def test_default_parameters_cls(self):
         self.assertEqual(int(tp["colsample_bylevel"]), self.editor.colsample_bylevel)
         self.assertEqual(int(tp["colsample_bynode"]), self.editor.colsample_bynode)
 
-    @unittest.skipIf(XGBRegressor is None, "Missing 'xgboost' package")
     def test_default_parameters_reg(self):
         data = Table("housing")
         booster = XGBRegressor()
@@ -206,7 +179,6 @@ def test_arguments(self):
                 "colsample_bynode": 1, "subsample": 1, "random_state": 0}
         self.assertDictEqual(self.editor.get_arguments(), args)
 
-    @unittest.skipIf(XGBRFClassifier is None, "Missing 'xgboost' package")
     def test_learner_parameters(self):
         params = (("Method", "Extreme Gradient Boosting Random Forest (xgboost)"),
                   ("Number of trees", 100),
each split", 1)) self.assertTupleEqual(self.editor.get_learner_parameters(), params) - @unittest.skipIf(XGBRFClassifier is None, "Missing 'xgboost' package") def test_default_parameters_cls(self): data = Table("heart_disease") booster = XGBRFClassifier() @@ -239,7 +210,6 @@ def test_default_parameters_cls(self): self.assertEqual(int(tp["colsample_bylevel"]), self.editor.colsample_bylevel) self.assertEqual(int(tp["colsample_bynode"]), self.editor.colsample_bynode) - @unittest.skipIf(XGBRFRegressor is None, "Missing 'xgboost' package") def test_default_parameters_reg(self): data = Table("housing") booster = XGBRFRegressor() @@ -266,7 +236,6 @@ def test_arguments(self): "reg_lambda": 3, "colsample_bylevel": 1, "random_state": 0} self.assertDictEqual(self.editor.get_arguments(), args) - @unittest.skipIf(CatGBClassifier is None, "Missing 'catboost' package") def test_learner_parameters(self): params = (("Method", "Gradient Boosting (catboost)"), ("Number of trees", 100), @@ -277,7 +246,6 @@ def test_learner_parameters(self): ("Fraction of features for each tree", 1)) self.assertTupleEqual(self.editor.get_learner_parameters(), params) - @unittest.skipIf(CatGBClassifier is None, "Missing 'catboost' package") def test_default_parameters_cls(self): data = Table("heart_disease") booster = CatGBClassifier() @@ -291,7 +259,6 @@ def test_default_parameters_cls(self): self.assertEqual(self.editor.learning_rate, 0.3) # params["learning_rate"] is automatically defined so don't test it - @unittest.skipIf(CatGBRegressor is None, "Missing 'catboost' package") def test_default_parameters_reg(self): data = Table("housing") booster = CatGBRegressor() @@ -305,6 +272,7 @@ def test_default_parameters_reg(self): self.assertEqual(self.editor.learning_rate, 0.3) # params["learning_rate"] is automatically defined so don't test it + class TestOWGradientBoosting(WidgetTest, WidgetLearnerTestMixin): def setUp(self): self.widget = self.create_widget(OWGradientBoosting, @@ -328,7 +296,6 @@ def test_datasets(self): for ds in datasets.datasets(): self.send_signal(self.widget.Inputs.data, ds) - @unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package") def test_xgb_params(self): simulate.combobox_activate_index(self.widget.controls.method_index, 1) editor = self.widget.editor @@ -350,27 +317,11 @@ def test_xgb_params(self): def test_methods(self): self.send_signal(self.widget.Inputs.data, self.data) method_cb = self.widget.controls.method_index - for i, (cls, _, _) in enumerate(LearnerItemModel.LEARNERS): - if cls is None: - continue + for i, cls in enumerate(LearnerItemModel.LEARNERS): simulate.combobox_activate_index(method_cb, i) self.click_apply() self.assertIsInstance(self.widget.learner, cls) - def test_missing_lib(self): - modules = {k: v for k, v in sys.modules.items() - if "orange" not in k.lower()} # retain built-ins - modules["xgboost"] = None - modules["catboost"] = None - # pylint: disable=reimported,redefined-outer-name - # pylint: disable=import-outside-toplevel - with patch.dict(sys.modules, modules, clear=True): - from Orange.widgets.model.owgradientboosting import \ - OWGradientBoosting - widget = self.create_widget(OWGradientBoosting, - stored_settings={"method_index": 3}) - self.assertEqual(widget.method_index, 0) - if __name__ == "__main__": unittest.main() From 3364ea4dc239236f633cfe3497b8c1d407e8344e Mon Sep 17 00:00:00 2001 From: PrimozGodec Date: Thu, 21 Sep 2023 15:40:37 +0200 Subject: [PATCH 2/4] Making lint happier --- Orange/widgets/model/owgradientboosting.py | 2 +- 1 file changed, 1 
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Orange/widgets/model/owgradientboosting.py b/Orange/widgets/model/owgradientboosting.py
index cfa8f9ba915..0baba814dda 100644
--- a/Orange/widgets/model/owgradientboosting.py
+++ b/Orange/widgets/model/owgradientboosting.py
@@ -125,7 +125,7 @@ def _add_main_layout(self):
         self._set_lambda_label()
 
     def _set_lambda_label(self):
-        self.lambda_label.setText("Lambda: {}".format(self.lambda_))
+        self.lambda_label.setText(f"Lambda: {self.lambda_}")
 
     def get_arguments(self) -> Dict:
         params = super().get_arguments()

From cae4290bf48513e006bceafc7ca053b586fec56f Mon Sep 17 00:00:00 2001
From: PrimozGodec
Date: Thu, 11 Jan 2024 23:14:27 +0100
Subject: [PATCH 3/4] Gradient boosting - Fix translations

---
 i18n/si.jaml | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/i18n/si.jaml b/i18n/si.jaml
index e39b24274ee..8318999074f 100644
--- a/i18n/si.jaml
+++ b/i18n/si.jaml
@@ -9596,14 +9596,8 @@ widgets/model/owcurvefit.py:
             housing: false
 widgets/model/owgradientboosting.py:
     class `LearnerItemModel`:
-        Extreme Gradient Boosting (xgboost): true
-        xgboost: false
-        Extreme Gradient Boosting Random Forest (xgboost): true
-        Gradient Boosting (catboost): true
-        catboost: false
         def `_add_data`:
             {name}: false
-            {lib} is not installed: {lib} ni nameščen
     class `BaseEditor`:
         def `_add_main_layout`:
             callback: false
@@ -9638,7 +9632,7 @@ widgets/model/owgradientboosting.py:
             Regularization:: Regularizacija
             lambda_index: false
         def `_set_lambda_label`:
-            'Lambda: {}': true
+            'Lambda: {self.lambda_}': true
         def `get_arguments`:
             reg_lambda: false
         def `get_learner_parameters`:

From 360b7b3290d8427c90de679f6b3503867e59acaa Mon Sep 17 00:00:00 2001
From: PrimozGodec
Date: Fri, 12 Jan 2024 08:43:07 +0100
Subject: [PATCH 4/4] customizableplot - Fix translations

---
 i18n/si.jaml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/i18n/si.jaml b/i18n/si.jaml
index 8318999074f..1cd43608fe3 100644
--- a/i18n/si.jaml
+++ b/i18n/si.jaml
@@ -14887,6 +14887,8 @@ widgets/visualize/utils/customizableplot.py:
             {font.family()}: false
             font-style: false
             {fstyle}: false
+            foreground: false
+            color: false
         def `update_axes_ticks_font`:
             tickFont: false
         def `update_legend_font`: