From 6cd54f2b5f9b1070c8f0b724732a0af04e856aa6 Mon Sep 17 00:00:00 2001 From: Charalampos Stratakis Date: Thu, 4 Dec 2025 01:23:38 +0100 Subject: [PATCH] Fix compatibility with scikit-learn 1.8+ scikit-learn 1.8.0+ removed the algorithm parameter from AdaBoostClassifier https://github.com/scikit-learn/scikit-learn/pull/32262 The changes maintain compatibility with previous versions of scikit-learn --- README.md | 3 +-- docs/getting_started.rst | 3 +-- examples/classifier.py | 2 +- niaaml/classifiers/ada_boost.py | 3 +-- 4 files changed, 4 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index cbeb6a3..ca4effa 100644 --- a/README.md +++ b/README.md @@ -172,8 +172,7 @@ In the modifier version of NiaAML optimization process there are two types of op ```python self._params = dict( - n_estimators = ParameterDefinition(MinMax(min=10, max=111), np.uint), - algorithm = ParameterDefinition(['SAMME', 'SAMME.R']) + n_estimators = ParameterDefinition(MinMax(min=10, max=111), np.uint) ) ``` diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 43acb29..ddb700b 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -181,8 +181,7 @@ In NiaAML there are two types of optimization. Goal of the first type is to find .. code:: python self._params = dict( - n_estimators = ParameterDefinition(MinMax(min=10, max=111), np.uint), - algorithm = ParameterDefinition(['SAMME', 'SAMME.R']) + n_estimators = ParameterDefinition(MinMax(min=10, max=111), np.uint) ) An individual in the second type of optimization is a real-valued vector that has a size equal to the sum of number of keys in all three dictionaries (classifier's _params, feature transformation algorithm's _params and feature selection algorithm's _params) and a value of each dimension is in range [0.0, 1.0]. The second type of optimization maps real values from the individual's vector to those parameter definitions in the dictionaries. 
Each parameter's value can be defined as a range or array of values. In the first case, a value from vector is mapped from one iterval to another and in the second case, a value from vector falls into one of the bins that represent an index of the array that holds possible parameter's values. diff --git a/examples/classifier.py b/examples/classifier.py index 8fa4685..b238e5d 100644 --- a/examples/classifier.py +++ b/examples/classifier.py @@ -19,7 +19,7 @@ classifier = AdaBoost() # set parameters of the classifier -classifier.set_parameters(n_estimators=50, algorithm="SAMME") +classifier.set_parameters(n_estimators=50) # fit classifier to the data classifier.fit(data_reader.get_x(), data_reader.get_y()) diff --git a/niaaml/classifiers/ada_boost.py b/niaaml/classifiers/ada_boost.py index dffc4f0..be6c292 100644 --- a/niaaml/classifiers/ada_boost.py +++ b/niaaml/classifiers/ada_boost.py @@ -51,9 +51,8 @@ def __init__(self, **kwargs): self._params = dict( n_estimators=ParameterDefinition(MinMax(min=10, max=111), np.uint), - algorithm=ParameterDefinition(["SAMME"]), ) - self.__ada_boost = AdaBoostClassifier(algorithm='SAMME') + self.__ada_boost = AdaBoostClassifier() def set_parameters(self, **kwargs): r"""Set the parameters/arguments of the algorithm."""