feat: add learning_rate to AdaBoost classifier and regressor. #251

Merged · 4 commits · Apr 28, 2023
Changes from 2 commits

18 changes: 14 additions & 4 deletions src/safeds/ml/classical/classification/_ada_boost.py
@@ -13,12 +13,22 @@


 class AdaBoost(Classifier):
-    """Ada Boost classification."""
+    """Ada Boost classification.

-    def __init__(self) -> None:
+    Parameters
+    ----------
+    learning_rate : float
+        Weight applied to each classifier at each boosting iteration.
+        A higher learning rate increases the contribution of each classifier.
+    """
+
+    def __init__(self, learning_rate: float = 1.0) -> None:
         self._wrapped_classifier: sk_AdaBoostClassifier | None = None
         self._feature_names: list[str] | None = None
         self._target_name: str | None = None
+        if learning_rate <= 0:
+            raise ValueError("learning_rate must be positive.")
+        self._learning_rate = learning_rate

     def fit(self, training_set: TaggedTable) -> AdaBoost:
         """
@@ -41,10 +51,10 @@ def fit(self, training_set: TaggedTable) -> AdaBoost:
         LearningError
             If the training data contains invalid values or if the training failed.
         """
-        wrapped_classifier = sk_AdaBoostClassifier()
+        wrapped_classifier = sk_AdaBoostClassifier(learning_rate=self._learning_rate)
         fit(wrapped_classifier, training_set)

-        result = AdaBoost()
+        result = AdaBoost(learning_rate=self._learning_rate)
         result._wrapped_classifier = wrapped_classifier
         result._feature_names = training_set.features.column_names
         result._target_name = training_set.target.name
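
For orientation, a minimal usage sketch of the new classifier parameter, mirroring the Table.from_dict / tag_columns calls used in the tests further down; the column names and values are illustrative, not taken from this diff:

    from safeds.data.tabular.containers import Table
    from safeds.ml.classical.classification import AdaBoost

    # Illustrative data; "col1" is tagged as the target column, as in the tests below.
    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
    tagged_table = training_set.tag_columns("col1")

    classifier = AdaBoost(learning_rate=0.5)  # weight applied to each classifier per boosting iteration
    fitted = classifier.fit(tagged_table)     # fit() forwards learning_rate to sk_AdaBoostClassifier

    # Non-positive values are rejected in __init__:
    # AdaBoost(learning_rate=0) raises ValueError("learning_rate must be positive.")
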
18 changes: 14 additions & 4 deletions src/safeds/ml/classical/regression/_ada_boost.py
@@ -13,12 +13,22 @@


 class AdaBoost(Regressor):
-    """Ada Boost regression."""
+    """Ada Boost regression.

-    def __init__(self) -> None:
+    Parameters
+    ----------
+    learning_rate : float
+        Weight applied to each regressor at each boosting iteration.
+        A higher learning rate increases the contribution of each regressor.
+    """
+
+    def __init__(self, learning_rate: float = 1.0) -> None:
         self._wrapped_regressor: sk_AdaBoostRegressor | None = None
         self._feature_names: list[str] | None = None
         self._target_name: str | None = None
+        if learning_rate <= 0:
+            raise ValueError("learning_rate must be positive.")
+        self.learning_rate = learning_rate

     def fit(self, training_set: TaggedTable) -> AdaBoost:
         """
@@ -41,10 +51,10 @@ def fit(self, training_set: TaggedTable) -> AdaBoost:
         LearningError
             If the training data contains invalid values or if the training failed.
         """
-        wrapped_regressor = sk_AdaBoostRegressor()
+        wrapped_regressor = sk_AdaBoostRegressor(learning_rate=self.learning_rate)
         fit(wrapped_regressor, training_set)

-        result = AdaBoost()
+        result = AdaBoost(learning_rate=self.learning_rate)
         result._wrapped_regressor = wrapped_regressor
         result._feature_names = training_set.features.column_names
         result._target_name = training_set.target.name
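
The regressor follows the same immutable-fit pattern as the classifier: fit() builds a fresh sk_AdaBoostRegressor with the stored learning_rate and returns a new AdaBoost, leaving the receiver unfitted. A small sketch of that behaviour, with illustrative data:

    from safeds.data.tabular.containers import Table
    from safeds.ml.classical.regression import AdaBoost

    # Illustrative data; "col1" is tagged as the target column.
    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [2, 4, 6, 8]})
    tagged_table = training_set.tag_columns("col1")

    unfitted = AdaBoost(learning_rate=0.3)
    fitted = unfitted.fit(tagged_table)

    assert fitted.learning_rate == unfitted.learning_rate  # hyperparameter is carried over to the copy
    assert unfitted._wrapped_regressor is None              # the receiver is not mutated by fit()
    assert fitted._wrapped_regressor is not None            # only the returned instance wraps sklearn
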
17 changes: 17 additions & 0 deletions tests/safeds/ml/classical/classification/test_ada_boost.py
@@ -0,0 +1,17 @@
+import pytest
+from safeds.data.tabular.containers import Table
+from safeds.ml.classical.classification import AdaBoost
+
+
+def test_should_throw_value_error() -> None:
+    with pytest.raises(ValueError, match="learning_rate must be positive."):
+        AdaBoost(learning_rate=-1)
+
+
+def test_should_give_learning_rate_to_sklearn() -> None:
+    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
+    tagged_table = training_set.tag_columns("col1")
+
+    regressor = AdaBoost(learning_rate=2).fit(tagged_table)
+    assert regressor._wrapped_classifier is not None
+    assert regressor._wrapped_classifier.learning_rate == regressor._learning_rate
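
Since __init__ rejects any learning_rate <= 0, the zero boundary could be exercised as well; a parametrized sketch (not part of this diff) using the same pytest setup:

    import pytest
    from safeds.ml.classical.classification import AdaBoost


    @pytest.mark.parametrize("learning_rate", [-1.0, 0.0])
    def test_should_throw_value_error_for_non_positive_learning_rate(learning_rate: float) -> None:
        # Both negative values and zero should hit the guard in __init__.
        with pytest.raises(ValueError, match="learning_rate must be positive."):
            AdaBoost(learning_rate=learning_rate)
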
17 changes: 17 additions & 0 deletions tests/safeds/ml/classical/regression/test_ada_boost.py
@@ -0,0 +1,17 @@
+import pytest
+from safeds.data.tabular.containers import Table
+from safeds.ml.classical.regression import AdaBoost
+
+
+def test_should_throw_value_error() -> None:
+    with pytest.raises(ValueError, match="learning_rate must be positive."):
+        AdaBoost(learning_rate=-1)
+
+
+def test_should_give_learning_rate_to_sklearn() -> None:
+    training_set = Table.from_dict({"col1": [1, 2, 3, 4], "col2": [1, 2, 3, 4]})
+    tagged_table = training_set.tag_columns("col1")
+
+    regressor = AdaBoost(learning_rate=2).fit(tagged_table)
+    assert regressor._wrapped_regressor is not None
+    assert regressor._wrapped_regressor.learning_rate == regressor.learning_rate
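
As a side note, the default of 1.0 used in both constructors matches scikit-learn's own default for its AdaBoost estimators; a quick consistency check (a sketch, not part of this diff):

    from sklearn.ensemble import AdaBoostClassifier, AdaBoostRegressor
    from safeds.ml.classical.regression import AdaBoost

    # scikit-learn defaults to learning_rate=1.0 for both AdaBoost estimators.
    assert AdaBoostClassifier().learning_rate == 1.0
    assert AdaBoostRegressor().learning_rate == 1.0
    # The safe-ds regressor above stores it as a public attribute with the same default.
    assert AdaBoost().learning_rate == 1.0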