Gradient Boosting - Remove try/catch around imports #6584

Draft · wants to merge 4 commits into master
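Every file below makes the same move: a guarded optional import becomes an unconditional one, so xgboost and catboost turn into hard dependencies. A minimal sketch of the pattern being removed versus the pattern being added (using the .xgb module from this diff):

    # Before: optional dependency; a missing package is silently tolerated
    try:
        from .xgb import *      # wraps the external xgboost package
    except ImportError:
        pass                    # the names from .xgb simply never exist

    # After: unconditional; a missing package now fails at import time
    from .xgb import *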
10 changes: 2 additions & 8 deletions Orange/classification/__init__.py
@@ -20,12 +20,6 @@
 from .sgd import *
 from .neural_network import *
 from .calibration import *
-try:
-    from .catgb import *
-except ModuleNotFoundError:
-    pass
+from .catgb import *
 from .gb import *
-try:
-    from .xgb import *
-except Exception:
-    pass
+from .xgb import *
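With the guard gone, a missing backend surfaces immediately instead of being swallowed. A hypothetical session on an environment without xgboost installed (illustrative output, assuming Orange/classification/xgb.py imports xgboost at module level):

    >>> import Orange.classification
    Traceback (most recent call last):
      ...
    ModuleNotFoundError: No module named 'xgboost'

Before this change the same import succeeded and the xgboost-backed learners were simply absent from the namespace.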
10 changes: 2 additions & 8 deletions Orange/modelling/__init__.py
@@ -11,12 +11,6 @@
 from .randomforest import *
 from .svm import *
 from .tree import *
-try:
-    from .catgb import *
-except ImportError:
-    pass
+from .catgb import *
 from .gb import *
-try:
-    from .xgb import *
-except ImportError:
-    pass
+from .xgb import *
10 changes: 2 additions & 8 deletions Orange/regression/__init__.py
@@ -14,13 +14,7 @@
 from .tree import *
 from .neural_network import *
 from ..classification.simple_tree import *
-try:
-    from .catgb import *
-except ModuleNotFoundError:
-    pass
+from .catgb import *
 from .gb import *
-try:
-    from .xgb import *
-except Exception:
-    pass
+from .xgb import *
 from .curvefit import *
30 changes: 6 additions & 24 deletions Orange/widgets/model/owgradientboosting.py
@@ -9,44 +9,26 @@
 from Orange.base import Learner
 from Orange.data import Table
 from Orange.modelling import GBLearner
-
-try:
-    from Orange.modelling import CatGBLearner
-except ImportError:
-    CatGBLearner = None
-try:
-    from Orange.modelling import XGBLearner, XGBRFLearner
-except ImportError:
-    XGBLearner = XGBRFLearner = None
-
+from Orange.modelling import CatGBLearner
+from Orange.modelling import XGBLearner, XGBRFLearner
 from Orange.widgets import gui
 from Orange.widgets.settings import Setting, SettingProvider
 from Orange.widgets.utils.owlearnerwidget import OWBaseLearner
 from Orange.widgets.utils.widgetpreview import WidgetPreview
 
 
 class LearnerItemModel(QStandardItemModel):
-    LEARNERS = [
-        (GBLearner, "", ""),
-        (XGBLearner, "Extreme Gradient Boosting (xgboost)", "xgboost"),
-        (XGBRFLearner, "Extreme Gradient Boosting Random Forest (xgboost)",
-         "xgboost"),
-        (CatGBLearner, "Gradient Boosting (catboost)", "catboost"),
-    ]
+    LEARNERS = [GBLearner, XGBLearner, XGBRFLearner, CatGBLearner]
 
     def __init__(self, parent):
        super().__init__(parent)
        self._add_data()
 
     def _add_data(self):
-        for cls, opt_name, lib in self.LEARNERS:
+        for cls in self.LEARNERS:
             item = QStandardItem()
-            imported = bool(cls)
-            name = cls.name if imported else opt_name
+            name = cls.name
             item.setData(f"{name}", Qt.DisplayRole)
-            item.setEnabled(imported)
-            if not imported:
-                item.setToolTip(f"{lib} is not installed")
             self.appendRow(item)

@@ -143,7 +125,7 @@ def _add_main_layout(self):
         self._set_lambda_label()
 
     def _set_lambda_label(self):
-        self.lambda_label.setText("Lambda: {}".format(self.lambda_))
+        self.lambda_label.setText(f"Lambda: {self.lambda_}")
 
     def get_arguments(self) -> Dict:
         params = super().get_arguments()
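Since every entry in LEARNERS is now a real class, rows are always enabled and the fallback names and tooltips are dead code. A hedged sanity check mirroring the test_model assertions below (assumes a Qt application exists, as in the widget tests):

    from Orange.widgets.model.owgradientboosting import LearnerItemModel

    model = LearnerItemModel(None)
    # one row per learner; all rows enabled now that imports are unconditional
    assert model.rowCount() == len(LearnerItemModel.LEARNERS)
    assert all(model.item(i).isEnabled() for i in range(model.rowCount()))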
63 changes: 7 additions & 56 deletions Orange/widgets/model/tests/test_owgradientboosting.py
@@ -1,32 +1,18 @@
 import json
 import unittest
-import sys
 from typing import Type
-from unittest.mock import patch, Mock
+from unittest.mock import Mock
 
 from Orange.classification import GBClassifier
-
-try:
-    from Orange.classification import XGBClassifier, XGBRFClassifier
-except ImportError:
-    XGBClassifier = XGBRFClassifier = None
-try:
-    from Orange.classification import CatGBClassifier
-except ImportError:
-    CatGBClassifier = None
+from Orange.classification import XGBClassifier, XGBRFClassifier
+from Orange.classification import CatGBClassifier
 from Orange.data import Table
 from Orange.modelling import GBLearner
 from Orange.preprocess.score import Scorer
 from Orange.regression import GBRegressor
 
-try:
-    from Orange.regression import XGBRegressor, XGBRFRegressor
-except ImportError:
-    XGBRegressor = XGBRFRegressor = None
-try:
-    from Orange.regression import CatGBRegressor
-except ImportError:
-    CatGBRegressor = None
+from Orange.regression import XGBRegressor, XGBRFRegressor
+from Orange.regression import CatGBRegressor
 from Orange.widgets.model.owgradientboosting import OWGradientBoosting, \
     LearnerItemModel, GBLearnerEditor, XGBLearnerEditor, XGBRFLearnerEditor, \
     CatGBLearnerEditor, BaseEditor
@@ -65,16 +51,6 @@ def test_model(self):
             self.assertEqual(model.item(i).isEnabled(),
                              classifiers[i] is not None)
-
-    @patch("Orange.widgets.model.owgradientboosting.LearnerItemModel.LEARNERS",
-           [(GBLearner, "", ""),
-            (None, "Gradient Boosting (catboost)", "catboost")])
-    def test_missing_lib(self):
-        widget = create_parent(CatGBLearnerEditor)
-        model = LearnerItemModel(widget)
-        self.assertEqual(model.rowCount(), 2)
-        self.assertTrue(model.item(0).isEnabled())
-        self.assertFalse(model.item(1).isEnabled())
 
 
 class BaseEditorTest(GuiTest):
     EditorClass: Type[BaseEditor] = None
@@ -146,7 +122,6 @@ def test_arguments(self):
                 "colsample_bynode": 1, "subsample": 1, "random_state": 0}
         self.assertDictEqual(self.editor.get_arguments(), args)
 
-    @unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
     def test_learner_parameters(self):
         params = (("Method", "Extreme Gradient Boosting (xgboost)"),
                   ("Number of trees", 100),
@@ -160,7 +135,6 @@ def test_learner_parameters(self):
                   ("Fraction of features for each split", 1))
         self.assertTupleEqual(self.editor.get_learner_parameters(), params)
 
-    @unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
     def test_default_parameters_cls(self):
         data = Table("heart_disease")
         booster = XGBClassifier()
@@ -178,7 +152,6 @@ def test_default_parameters_cls(self):
         self.assertEqual(int(tp["colsample_bylevel"]), self.editor.colsample_bylevel)
         self.assertEqual(int(tp["colsample_bynode"]), self.editor.colsample_bynode)
 
-    @unittest.skipIf(XGBRegressor is None, "Missing 'xgboost' package")
     def test_default_parameters_reg(self):
         data = Table("housing")
         booster = XGBRegressor()
@@ -206,7 +179,6 @@ def test_arguments(self):
                 "colsample_bynode": 1, "subsample": 1, "random_state": 0}
         self.assertDictEqual(self.editor.get_arguments(), args)
 
-    @unittest.skipIf(XGBRFClassifier is None, "Missing 'xgboost' package")
     def test_learner_parameters(self):
         params = (("Method",
                    "Extreme Gradient Boosting Random Forest (xgboost)"),
@@ -221,7 +193,6 @@ def test_learner_parameters(self):
                   ("Fraction of features for each split", 1))
         self.assertTupleEqual(self.editor.get_learner_parameters(), params)
 
-    @unittest.skipIf(XGBRFClassifier is None, "Missing 'xgboost' package")
     def test_default_parameters_cls(self):
         data = Table("heart_disease")
         booster = XGBRFClassifier()
@@ -239,7 +210,6 @@ def test_default_parameters_cls(self):
         self.assertEqual(int(tp["colsample_bylevel"]), self.editor.colsample_bylevel)
         self.assertEqual(int(tp["colsample_bynode"]), self.editor.colsample_bynode)
 
-    @unittest.skipIf(XGBRFRegressor is None, "Missing 'xgboost' package")
     def test_default_parameters_reg(self):
         data = Table("housing")
         booster = XGBRFRegressor()
@@ -266,7 +236,6 @@ def test_arguments(self):
                 "reg_lambda": 3, "colsample_bylevel": 1, "random_state": 0}
         self.assertDictEqual(self.editor.get_arguments(), args)
 
-    @unittest.skipIf(CatGBClassifier is None, "Missing 'catboost' package")
     def test_learner_parameters(self):
         params = (("Method", "Gradient Boosting (catboost)"),
                   ("Number of trees", 100),
@@ -277,7 +246,6 @@ def test_learner_parameters(self):
                   ("Fraction of features for each tree", 1))
         self.assertTupleEqual(self.editor.get_learner_parameters(), params)
 
-    @unittest.skipIf(CatGBClassifier is None, "Missing 'catboost' package")
     def test_default_parameters_cls(self):
         data = Table("heart_disease")
         booster = CatGBClassifier()
@@ -291,7 +259,6 @@ def test_default_parameters_cls(self):
         self.assertEqual(self.editor.learning_rate, 0.3)
         # params["learning_rate"] is automatically defined so don't test it
 
-    @unittest.skipIf(CatGBRegressor is None, "Missing 'catboost' package")
     def test_default_parameters_reg(self):
         data = Table("housing")
         booster = CatGBRegressor()
@@ -305,6 +272,7 @@ def test_default_parameters_reg(self):
         self.assertEqual(self.editor.learning_rate, 0.3)
         # params["learning_rate"] is automatically defined so don't test it
 
+
 class TestOWGradientBoosting(WidgetTest, WidgetLearnerTestMixin):
     def setUp(self):
         self.widget = self.create_widget(OWGradientBoosting,
@@ -328,7 +296,6 @@ def test_datasets(self):
         for ds in datasets.datasets():
             self.send_signal(self.widget.Inputs.data, ds)
 
-    @unittest.skipIf(XGBClassifier is None, "Missing 'xgboost' package")
     def test_xgb_params(self):
         simulate.combobox_activate_index(self.widget.controls.method_index, 1)
         editor = self.widget.editor
@@ -350,27 +317,11 @@ def test_xgb_params(self):
     def test_methods(self):
         self.send_signal(self.widget.Inputs.data, self.data)
         method_cb = self.widget.controls.method_index
-        for i, (cls, _, _) in enumerate(LearnerItemModel.LEARNERS):
-            if cls is None:
-                continue
+        for i, cls in enumerate(LearnerItemModel.LEARNERS):
             simulate.combobox_activate_index(method_cb, i)
             self.click_apply()
             self.assertIsInstance(self.widget.learner, cls)
-
-    def test_missing_lib(self):
-        modules = {k: v for k, v in sys.modules.items()
-                   if "orange" not in k.lower()}  # retain built-ins
-        modules["xgboost"] = None
-        modules["catboost"] = None
-        # pylint: disable=reimported,redefined-outer-name
-        # pylint: disable=import-outside-toplevel
-        with patch.dict(sys.modules, modules, clear=True):
-            from Orange.widgets.model.owgradientboosting import \
-                OWGradientBoosting
-            widget = self.create_widget(OWGradientBoosting,
-                                        stored_settings={"method_index": 3})
-            self.assertEqual(widget.method_index, 0)
 
 
 if __name__ == "__main__":
     unittest.main()
10 changes: 3 additions & 7 deletions i18n/si.jaml
@@ -9596,14 +9596,8 @@ widgets/model/owcurvefit.py:
     housing: false
 widgets/model/owgradientboosting.py:
   class `LearnerItemModel`:
-    Extreme Gradient Boosting (xgboost): true
-    xgboost: false
-    Extreme Gradient Boosting Random Forest (xgboost): true
-    Gradient Boosting (catboost): true
-    catboost: false
     def `_add_data`:
       {name}: false
-      {lib} is not installed: {lib} ni nameščen
   class `BaseEditor`:
     def `_add_main_layout`:
       callback: false

@@ -9638,7 +9632,7 @@ widgets/model/owgradientboosting.py:
       Regularization:: Regularizacija
       lambda_index: false
     def `_set_lambda_label`:
-      'Lambda: {}': true
+      'Lambda: {self.lambda_}': true
     def `get_arguments`:
       reg_lambda: false
     def `get_learner_parameters`:

@@ -14893,6 +14887,8 @@ widgets/visualize/utils/customizableplot.py:
       {font.family()}: false
       font-style: false
       {fstyle}: false
+      foreground: false
+      color: false
     def `update_axes_ticks_font`:
       tickFont: false
     def `update_legend_font`: