
Commit b6004d5

mob next [ci-skip] [ci skip] [skip ci]
lastFile:skore/src/skore/sklearn/_comparison/metrics_accessor.py
thomass-dev committed Feb 6, 2025
1 parent 0a6747d commit b6004d5
Showing 1 changed file with 39 additions and 16 deletions: skore/src/skore/sklearn/_comparison/metrics_accessor.py
@@ -93,26 +93,49 @@ def report_metrics(
         --------
         >>> from sklearn.datasets import load_breast_cancer
         >>> from sklearn.linear_model import LogisticRegression
-        >>> from skore import EstimatorReport
-        >>> from skore import ComparisonReport
-        >>> estimator = LogisticRegression().fit(X_train, y_train)
+        >>> from skore import ComparisonReport, EstimatorReport, train_test_split
+        >>>
         >>> X, y = load_breast_cancer(return_X_y=True)
         >>> X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
-        >>> classifier1 = LogisticRegression(max_iter=10_000)
-        >>> classifier2 = LogisticRegression(max_iter=10_000)
-        >>> estimator_report1 = EstimatorReport(classifier1, X_test=X_test, y_test=y_test)
-        >>> estimator_report2 = EstimatorReport(classifier2, X_test=X_test, y_test=y_test)
-        >>> comparison_report = ComparisonReport([estimator_report1, estimator_report2])
+        >>> estimator_1 = LogisticRegression(random_state=42)
+        >>> estimator_report_1 = EstimatorReport(
+        ...     estimator_1,
+        ...     X_train=X_train,
+        ...     y_train=y_train,
+        ...     X_test=X_test,
+        ...     y_test=y_test,
+        ... )
+        >>>
+        >>> estimator_2 = LogisticRegression(random_state=42)
+        >>> estimator_report_2 = EstimatorReport(
+        ...     estimator_2,
+        ...     X_train=X_train,
+        ...     y_train=y_train,
+        ...     X_test=X_test,
+        ...     y_test=y_test,
+        ... )
+        >>>
+        >>> comparison_report = ComparisonReport(
+        ...     [estimator_report_1, estimator_report_2]
+        ... )
+        >>>
+        >>> comparison_report.metrics.report_metrics(
+        ...     scoring=["precision", "recall"],
+        ...     pos_label=1,
+        ... )
+        Metric                 Precision (↗︎)  Recall (↗︎)
+        Estimator
+        0  LogisticRegression        0.96...      0.97...
+        1  LogisticRegression        0.96...      0.97...
+        >>>
         >>> comparison_report.metrics.report_metrics(
-        ...     scoring=["precision", "recall"], pos_label=1, aggregate=["mean", "std"]
+        ...     scoring=["precision", "recall"],
+        ...     pos_label=1,
+        ...     aggregate=["mean", "std"],
         ... )
-        Metric                   Precision (↗︎)  Recall (↗︎)
-        LogisticRegression mean         0.94...      0.96...
-                           std          0.02...      0.02...
+        Metric  Precision (↗︎)  Recall (↗︎)
+        mean           0.96...      0.97...
+        std            0.00...      0.00...
         """
         return self._compute_metric_scores(
             report_metric_name="report_metrics",
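
For reference, the added doctest lines read as the following standalone script (a sketch assembled from the new example, not part of the commit itself; the exact precision/recall values depend on the train/test split, which is why the doctest elides them as 0.96.../0.97...):

# Standalone version of the updated docstring example (sketch).
from sklearn.datasets import load_breast_cancer
from sklearn.linear_model import LogisticRegression
from skore import ComparisonReport, EstimatorReport, train_test_split

# Load a binary classification dataset and split it, as in the docstring.
X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)

# Wrap two (here identical) estimators in EstimatorReport objects.
estimator_report_1 = EstimatorReport(
    LogisticRegression(random_state=42),
    X_train=X_train, y_train=y_train, X_test=X_test, y_test=y_test,
)
estimator_report_2 = EstimatorReport(
    LogisticRegression(random_state=42),
    X_train=X_train, y_train=y_train, X_test=X_test, y_test=y_test,
)

# Compare the two reports and compute precision/recall,
# first per estimator, then aggregated as mean and std.
comparison_report = ComparisonReport([estimator_report_1, estimator_report_2])
per_estimator = comparison_report.metrics.report_metrics(
    scoring=["precision", "recall"], pos_label=1
)
aggregated = comparison_report.metrics.report_metrics(
    scoring=["precision", "recall"], pos_label=1, aggregate=["mean", "std"]
)
print(per_estimator)
print(aggregated)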
