diff --git a/examples/applications/porto_seguro_keras_under_sampling.py b/examples/applications/porto_seguro_keras_under_sampling.py
index ee8a6e2f0..9175427fa 100644
--- a/examples/applications/porto_seguro_keras_under_sampling.py
+++ b/examples/applications/porto_seguro_keras_under_sampling.py
@@ -151,7 +151,7 @@ def wrapper(*args, **kwds):
 # mini-batches.
 import tensorflow
 from sklearn.metrics import roc_auc_score
-from sklearn.utils import parse_version
+from sklearn.utils.fixes import parse_version
 
 tf_version = parse_version(tensorflow.__version__)
 
diff --git a/imblearn/_config.py b/imblearn/_config.py
index 4c093db09..ef98e7305 100644
--- a/imblearn/_config.py
+++ b/imblearn/_config.py
@@ -7,7 +7,7 @@
 from contextlib import contextmanager as contextmanager
 
 import sklearn
-from sklearn.utils import parse_version
+from sklearn.utils.fixes import parse_version
 
 sklearn_version = parse_version(sklearn.__version__)
 
diff --git a/imblearn/base.py b/imblearn/base.py
index e529fead6..0b2d94e84 100644
--- a/imblearn/base.py
+++ b/imblearn/base.py
@@ -17,7 +17,7 @@
     from sklearn.base import _OneToOneFeatureMixin as OneToOneFeatureMixin
 
 from sklearn.preprocessing import label_binarize
-from sklearn.utils import parse_version
+from sklearn.utils.fixes import parse_version
 from sklearn.utils.multiclass import check_classification_targets
 
 from .utils import check_sampling_strategy, check_target_type
diff --git a/imblearn/ensemble/_bagging.py b/imblearn/ensemble/_bagging.py
index afcf3fd3a..2808239a7 100644
--- a/imblearn/ensemble/_bagging.py
+++ b/imblearn/ensemble/_bagging.py
@@ -16,7 +16,7 @@
 from sklearn.ensemble._base import _partition_estimators
 from sklearn.exceptions import NotFittedError
 from sklearn.tree import DecisionTreeClassifier
-from sklearn.utils import parse_version
+from sklearn.utils.fixes import parse_version
 from sklearn.utils.validation import check_is_fitted
 
 try:
diff --git a/imblearn/ensemble/_easy_ensemble.py b/imblearn/ensemble/_easy_ensemble.py
index db3c6cbb7..1da81d93c 100644
--- a/imblearn/ensemble/_easy_ensemble.py
+++ b/imblearn/ensemble/_easy_ensemble.py
@@ -15,8 +15,8 @@
 from sklearn.ensemble._bagging import _parallel_decision_function
 from sklearn.ensemble._base import _partition_estimators
 from sklearn.exceptions import NotFittedError
-from sklearn.utils import parse_version
 from sklearn.utils._tags import _safe_tags
+from sklearn.utils.fixes import parse_version
 from sklearn.utils.validation import check_is_fitted
 
 try:
diff --git a/imblearn/ensemble/_forest.py b/imblearn/ensemble/_forest.py
index a7c8f9beb..5f8d08e91 100644
--- a/imblearn/ensemble/_forest.py
+++ b/imblearn/ensemble/_forest.py
@@ -22,7 +22,8 @@
 )
 from sklearn.exceptions import DataConversionWarning
 from sklearn.tree import DecisionTreeClassifier
-from sklearn.utils import _safe_indexing, check_random_state, parse_version
+from sklearn.utils import _safe_indexing, check_random_state
+from sklearn.utils.fixes import parse_version
 from sklearn.utils.multiclass import type_of_target
 from sklearn.utils.validation import _check_sample_weight
 
diff --git a/imblearn/ensemble/_weight_boosting.py b/imblearn/ensemble/_weight_boosting.py
index 539b7824f..9da02255e 100644
--- a/imblearn/ensemble/_weight_boosting.py
+++ b/imblearn/ensemble/_weight_boosting.py
@@ -8,7 +8,8 @@
 from sklearn.ensemble import AdaBoostClassifier
 from sklearn.ensemble._base import _set_random_states
 from sklearn.tree import DecisionTreeClassifier
-from sklearn.utils import _safe_indexing, parse_version
+from sklearn.utils import _safe_indexing
+from sklearn.utils.fixes import parse_version
 from sklearn.utils.validation import has_fit_parameter
 
 from ..base import _ParamsValidationMixin
diff --git a/imblearn/keras/tests/test_generator.py b/imblearn/keras/tests/test_generator.py
index f49ecd0aa..a073d846d 100644
--- a/imblearn/keras/tests/test_generator.py
+++ b/imblearn/keras/tests/test_generator.py
@@ -70,7 +70,7 @@ def test_balanced_batch_generator_class(data, sampler, sample_weight):
         batch_size=10,
         random_state=42,
     )
-    model.fit_generator(generator=training_generator, epochs=10)
+    model.fit(training_generator, epochs=10)
 
 
 @pytest.mark.parametrize("keep_sparse", [True, False])
@@ -122,8 +122,8 @@ def test_balanced_batch_generator_function(data, sampler, sample_weight):
         batch_size=10,
         random_state=42,
     )
-    model.fit_generator(
-        generator=training_generator,
+    model.fit(
+        training_generator,
         steps_per_epoch=steps_per_epoch,
         epochs=10,
     )
diff --git a/imblearn/tests/test_pipeline.py b/imblearn/tests/test_pipeline.py
index c39758d9f..409dbce41 100644
--- a/imblearn/tests/test_pipeline.py
+++ b/imblearn/tests/test_pipeline.py
@@ -410,7 +410,7 @@ def test_fit_predict_on_pipeline_without_fit_predict():
     scaler = StandardScaler()
     pca = PCA(svd_solver="full")
     pipe = Pipeline([("scaler", scaler), ("pca", pca)])
-    error_regex = "'PCA' object has no attribute 'fit_predict'"
+    error_regex = "has no attribute 'fit_predict'"
     with raises(AttributeError, match=error_regex):
         getattr(pipe, "fit_predict")
 
@@ -1219,7 +1219,7 @@ def test_score_samples_on_pipeline_without_score_samples():
     pipe.fit(X, y)
     with pytest.raises(
         AttributeError,
-        match="'LogisticRegression' object has no attribute 'score_samples'",
+        match="has no attribute 'score_samples'",
     ):
         pipe.score_samples(X)
 
diff --git a/imblearn/utils/_available_if.py b/imblearn/utils/_available_if.py
index 9b2c5e6db..bca75e735 100644
--- a/imblearn/utils/_available_if.py
+++ b/imblearn/utils/_available_if.py
@@ -7,7 +7,7 @@
 from types import MethodType
 
 import sklearn
-from sklearn.utils import parse_version
+from sklearn.utils.fixes import parse_version
 
 sklearn_version = parse_version(sklearn.__version__)