Always install xgboost and catboost
PrimozGodec committed Sep 15, 2023
1 parent 24596fe commit 1948bf8
Showing 1 changed file with 12 additions and 12 deletions.
Orange/widgets/model/tests/test_owgradientboosting.py (24 changes: 12 additions & 12 deletions)
@@ -36,6 +36,14 @@
 from Orange.widgets.widget import OWWidget


+def get_tree_train_params(model):
+    ln = json.loads(model.skl_model.get_booster().save_config())["learner"]
+    try:
+        return ln["gradient_booster"]["tree_train_param"]
+    except KeyError:
+        return ln["gradient_booster"]["updater"]["grow_colmaker"]["train_param"]
+
+
 def create_parent(editor_class):
     class DummyWidget(OWWidget):
         name = "Mock"
@@ -158,9 +166,7 @@ def test_default_parameters_cls(self):
         booster = XGBClassifier()
         model = booster(data)
         params = model.skl_model.get_params()
-        booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = get_tree_train_params(model)
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
@@ -178,9 +184,7 @@ def test_default_parameters_reg(self):
         booster = XGBRegressor()
         model = booster(data)
         params = model.skl_model.get_params()
-        booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = get_tree_train_params(model)
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
@@ -223,9 +227,7 @@ def test_default_parameters_cls(self):
         booster = XGBRFClassifier()
         model = booster(data)
         params = model.skl_model.get_params()
-        booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = get_tree_train_params(model)
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
@@ -243,9 +245,7 @@ def test_default_parameters_reg(self):
         booster = XGBRFRegressor()
         model = booster(data)
         params = model.skl_model.get_params()
-        booster_params = json.loads(model.skl_model.get_booster().save_config())
-        updater = booster_params["learner"]["gradient_booster"]["updater"]
-        tp = updater["grow_colmaker"]["train_param"]
+        tp = get_tree_train_params(model)
         self.assertEqual(params["n_estimators"], self.editor.n_estimators)
         self.assertEqual(
             round(float(tp["learning_rate"]), 1), self.editor.learning_rate
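For context, the helper added in this commit exists because XGBoost serializes its tree training parameters at different places in the save_config() JSON depending on the installed version. The snippet below is a minimal, self-contained sketch of the two layouts the helper copes with; the dictionaries are simplified illustrations, not the full config XGBoost emits, and the parameter values are assumptions.

# Simplified, assumed shapes of json.loads(booster.save_config())["learner"]
# as returned by different XGBoost releases (values are strings in the real config).
new_layout = {
    "gradient_booster": {
        "tree_train_param": {"learning_rate": "0.3", "max_depth": "6"},
    }
}
old_layout = {
    "gradient_booster": {
        "updater": {
            "grow_colmaker": {
                "train_param": {"learning_rate": "0.3", "max_depth": "6"},
            }
        }
    }
}


def tree_train_params(learner):
    # Same fallback logic as get_tree_train_params() in the test module,
    # applied to an already-parsed "learner" dictionary.
    try:
        return learner["gradient_booster"]["tree_train_param"]
    except KeyError:
        return learner["gradient_booster"]["updater"]["grow_colmaker"]["train_param"]


assert tree_train_params(new_layout)["max_depth"] == "6"
assert tree_train_params(old_layout)["max_depth"] == "6"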
