Skip to content

Commit 9445cd3

Browse files
run pre-commit
1 parent ff4c878 commit 9445cd3

File tree

6 files changed: +6 / -18 lines changed

6 files changed: +6 / -18 lines changed

doubleml/tests/test_apo_tune_ml_models.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,4 +41,3 @@ def test_doubleml_apo_optuna_tune(sampler_name, optuna_sampler):
4141

4242
# ensure tuning improved RMSE
4343
assert tuned_score[learner_name] < untuned_score[learner_name]
44-

doubleml/tests/test_did_cs_tune_ml_models.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,4 @@
1-
import logging
2-
31
import numpy as np
4-
import optuna
52
import pytest
63
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
74

@@ -51,4 +48,3 @@ def test_doubleml_did_cs_optuna_tune(sampler_name, optuna_sampler, score):
5148

5249
# ensure tuning improved RMSE
5350
assert tuned_score[learner_name] < untuned_score[learner_name]
54-

doubleml/tests/test_dml_tune_optuna.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,7 @@ def _assert_tree_params(param_dict, depth_range=(2, 10), leaf_range=(2, 100), le
3838
assert leaf_range[0] <= param_dict["min_samples_leaf"] <= leaf_range[1]
3939
assert leaf_nodes_range[0] <= param_dict["max_leaf_nodes"] <= leaf_nodes_range[1]
4040

41+
4142
def _build_param_space(dml_obj, param_fn):
4243
"""Build parameter grid using the actual params_names from the DML object."""
4344
param_grid = {learner_name: param_fn for learner_name in dml_obj.params_names}

doubleml/tests/test_irm_tune_ml_models.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
import numpy as np
2-
import optuna
32
import pytest
43
from sklearn.tree import DecisionTreeClassifier, DecisionTreeRegressor
54

doubleml/tests/test_plr_tune_ml_models.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,4 +53,4 @@ def test_doubleml_plr_optuna_tune(sampler_name, optuna_sampler):
5353

5454
# ensure tuning improved RMSE
5555
assert tuned_score["ml_l"] < untuned_score["ml_l"]
56-
assert tuned_score["ml_m"] < untuned_score["ml_m"]
56+
assert tuned_score["ml_m"] < untuned_score["ml_m"]

doubleml/utils/_tune_optuna.py

Lines changed: 4 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -148,20 +148,13 @@ def _resolve_optuna_scoring(scoring_method, learner, params_name):
148148
return scoring_method, message
149149

150150
if is_regressor(learner):
151-
message = (
152-
"No scoring method provided, using 'neg_root_mean_squared_error' (RMSE) "
153-
f"for learner '{params_name}'."
154-
)
151+
message = "No scoring method provided, using 'neg_root_mean_squared_error' (RMSE) " f"for learner '{params_name}'."
155152
return "neg_root_mean_squared_error", message
156153

157154
if is_classifier(learner):
158-
message = (
159-
f"No scoring method provided, using 'neg_log_loss' "
160-
f"for learner '{params_name}'."
161-
)
155+
message = f"No scoring method provided, using 'neg_log_loss' " f"for learner '{params_name}'."
162156
return "neg_log_loss", message
163157

164-
165158
raise RuntimeError(
166159
f"No scoring method provided and estimator type could not be inferred. Please provide a scoring_method for learner "
167160
f"'{params_name}'."
@@ -247,8 +240,8 @@ def _check_tuning_inputs(
247240
if param_grid_func is not None and not callable(param_grid_func):
248241
raise TypeError(
249242
"param_grid must be a callable function that takes a trial and returns a dict. "
250-
f"Got {type(param_grid_func).__name__} for learner '{params_name}'.")
251-
243+
f"Got {type(param_grid_func).__name__} for learner '{params_name}'."
244+
)
252245

253246
if scoring_method is not None and not callable(scoring_method) and not isinstance(scoring_method, str):
254247
if not isinstance(scoring_method, Iterable):

0 commit comments

Comments (0)