From e49c9f09393232c65208f15b271dbe92822924b9 Mon Sep 17 00:00:00 2001
From: Andrei Ivanov
Date: Wed, 18 Sep 2024 14:06:28 -0700
Subject: [PATCH 1/2] Remove the deprecated `categorical_feature` parameter from `lightgbm.train(...)` calls.

---
 torch_frame/gbdt/tuned_lightgbm.py | 10 ++++------
 1 file changed, 4 insertions(+), 6 deletions(-)

diff --git a/torch_frame/gbdt/tuned_lightgbm.py b/torch_frame/gbdt/tuned_lightgbm.py
index 732ad7418..dc466329d 100644
--- a/torch_frame/gbdt/tuned_lightgbm.py
+++ b/torch_frame/gbdt/tuned_lightgbm.py
@@ -103,7 +103,6 @@ def objective(
         trial: Any,  # optuna.trial.Trial
         train_data: Any,  # lightgbm.Dataset
         eval_data: Any,  # lightgbm.Dataset
-        cat_features: list[int],
         num_boost_round: int,
     ) -> float:
         r"""Objective function to be optimized.
@@ -112,8 +111,6 @@ def objective(
             trial (optuna.trial.Trial): Optuna trial object.
             train_data (lightgbm.Dataset): Train data.
             eval_data (lightgbm.Dataset): Validation data.
-            cat_features (list[int]): Array containing indexes of
-                categorical features.
             num_boost_round (int): Number of boosting round.
 
         Returns:
@@ -169,7 +166,7 @@ def objective(
         boost = lightgbm.train(
             self.params, train_data, num_boost_round=num_boost_round,
-            categorical_feature=cat_features, valid_sets=[eval_data],
+            valid_sets=[eval_data],
             callbacks=[
                 lightgbm.early_stopping(stopping_rounds=50, verbose=False),
                 lightgbm.log_evaluation(period=2000)
             ])
@@ -199,18 +196,19 @@ def _tune(
         assert train_y is not None
         assert val_y is not None
         train_data = lightgbm.Dataset(train_x, label=train_y,
+                                      categorical_feature=cat_features,
                                       free_raw_data=False)
         eval_data = lightgbm.Dataset(val_x, label=val_y, free_raw_data=False)
         study.optimize(
             lambda trial: self.objective(trial, train_data, eval_data,
-                                         cat_features, num_boost_round),
+                                         num_boost_round),
             num_trials)
         self.params.update(study.best_params)
 
         self.model = lightgbm.train(
             self.params, train_data, num_boost_round=num_boost_round,
-            categorical_feature=cat_features, valid_sets=[eval_data],
+            valid_sets=[eval_data],
             callbacks=[
                 lightgbm.early_stopping(stopping_rounds=50, verbose=False),
                 lightgbm.log_evaluation(period=2000)
             ])

From 830ce79a4403c35de901067689ed451c42e27996 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Wed, 18 Sep 2024 21:20:10 +0000
Subject: [PATCH 2/2] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 torch_frame/gbdt/tuned_lightgbm.py | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/torch_frame/gbdt/tuned_lightgbm.py b/torch_frame/gbdt/tuned_lightgbm.py
index dc466329d..391873226 100644
--- a/torch_frame/gbdt/tuned_lightgbm.py
+++ b/torch_frame/gbdt/tuned_lightgbm.py
@@ -166,8 +166,7 @@ def objective(
         boost = lightgbm.train(
             self.params, train_data, num_boost_round=num_boost_round,
-            valid_sets=[eval_data],
-            callbacks=[
+            valid_sets=[eval_data], callbacks=[
                 lightgbm.early_stopping(stopping_rounds=50, verbose=False),
                 lightgbm.log_evaluation(period=2000)
             ])
 
@@ -202,14 +201,12 @@ def _tune(
         study.optimize(
             lambda trial: self.objective(trial, train_data, eval_data,
-                                         num_boost_round),
-            num_trials)
+                                         num_boost_round), num_trials)
         self.params.update(study.best_params)
 
         self.model = lightgbm.train(
             self.params, train_data, num_boost_round=num_boost_round,
-            valid_sets=[eval_data],
-            callbacks=[
+            valid_sets=[eval_data], callbacks=[
                 lightgbm.early_stopping(stopping_rounds=50, verbose=False),
                 lightgbm.log_evaluation(period=2000)
             ])
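Note on the migration (not part of the patches above): recent LightGBM releases
warn that the `categorical_feature` argument of `lightgbm.train()` is deprecated
and that categorical columns should instead be declared when the `lightgbm.Dataset`
is constructed, which is what PATCH 1/2 does in `_tune()`. Below is a minimal
standalone sketch of the new-style usage; the data, column indices, and parameter
dict are made up for illustration and are not taken from torch_frame:

    import lightgbm
    import numpy as np

    rng = np.random.default_rng(0)

    # Hypothetical toy data: columns 0 and 2 hold integer-coded categories.
    X_train = rng.random((200, 4))
    X_train[:, [0, 2]] = rng.integers(0, 5, size=(200, 2))
    y_train = rng.integers(0, 2, size=200)
    X_val = rng.random((50, 4))
    X_val[:, [0, 2]] = rng.integers(0, 5, size=(50, 2))
    y_val = rng.integers(0, 2, size=50)
    cat_features = [0, 2]  # column indices to treat as categorical

    # Declare categorical columns on the Dataset (as the patch does in
    # _tune), so train() no longer receives a categorical_feature argument.
    train_data = lightgbm.Dataset(X_train, label=y_train,
                                  categorical_feature=cat_features,
                                  free_raw_data=False)
    eval_data = lightgbm.Dataset(X_val, label=y_val, free_raw_data=False)

    booster = lightgbm.train({"objective": "binary", "verbosity": -1},
                             train_data, num_boost_round=10,
                             valid_sets=[eval_data])

Mirroring the patch, only the training Dataset declares the categorical columns;
as far as I can tell, lightgbm.train() aligns validation sets against the
training Dataset internally, so eval_data does not need the declaration repeated.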