Skip to content

Commit

Permalink
ENH: add tests for new conformity score
Browse files Browse the repository at this point in the history
  • Loading branch information
Candice Moyet committed Jul 13, 2023
1 parent 0fc4304 commit b27c567
Show file tree
Hide file tree
Showing 2 changed files with 154 additions and 4 deletions.
134 changes: 132 additions & 2 deletions mapie/tests/test_conformity_scores.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,25 @@
import numpy as np
import pytest

from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures

from mapie._typing import ArrayLike, NDArray
from mapie.conformity_scores import (AbsoluteConformityScore, ConformityScore,
GammaConformityScore)
from mapie.conformity_scores import (AbsoluteConformityScore,
ConformityScore,
GammaConformityScore,
FittedResidualNormalisingScore)
from mapie.regression import MapieRegressor

X_toy = np.array([0, 1, 2, 3, 4, 5]).reshape(-1, 1)
y_toy = np.array([5, 7, 9, 11, 13, 15])
y_pred_list = [4, 7, 10, 12, 13, 12]
conf_scores_list = [1, 0, -1, -1, 0, 3]
conf_scores_gamma_list = [1 / 4, 0, -1 / 10, -1 / 12, 0, 3 / 12]
conf_scores_crf_list = [6.73794700e-03, 0.00000000e+00, 1.23409804e-04,
1.67017008e-05, 0.00000000e+00, 9.17706962e-07]
random_state = 42


Expand Down Expand Up @@ -210,3 +220,123 @@ def test_check_consistency() -> None:
dummy_conf_score.check_consistency(
X_toy, y_toy, y_pred_list, conformity_scores
)


@pytest.mark.parametrize("y_pred", [np.array(y_pred_list), y_pred_list])
def test_crf_prefit_conformity_score_get_conformity_scores(
    y_pred: NDArray
) -> None:
    """
    Test conformity score computation for FittedResidualNormalisingScore
    when prefit is True.

    The residual estimator is fitted on the toy data beforehand, and both
    an ndarray and a plain list of predictions are exercised via parametrize.
    """
    # The estimator is fitted up-front, matching prefit=True below.
    residual_estimator = LinearRegression().fit(X_toy, y_toy)
    crf_conf_score = FittedResidualNormalisingScore(
        residual_estimator=residual_estimator,
        prefit=True,
        random_state=random_state
    )
    conf_scores = crf_conf_score.get_conformity_scores(
        X_toy, y_toy, y_pred
    )
    # conf_scores_crf_list holds the values precomputed for the toy dataset.
    expected_signed_conf_scores = np.array(conf_scores_crf_list)
    np.testing.assert_allclose(conf_scores, expected_signed_conf_scores)


@pytest.mark.parametrize("y_pred", [np.array(y_pred_list), y_pred_list])
def test_crf_conformity_score_get_conformity_scores(y_pred: NDArray) -> None:
    """
    Check the conformity scores produced by FittedResidualNormalisingScore
    when the residual estimator is fitted internally (prefit is False).
    """
    score = FittedResidualNormalisingScore(random_state=random_state)
    computed = score.get_conformity_scores(X_toy, y_toy, y_pred)
    # Reference values for the toy data with an internally fitted estimator.
    expected = np.array([4.641589e-10, 0.000000e+00, 3.000000e+08])
    np.testing.assert_allclose(computed, expected)


def test_crf_score_prefit_with_notfitted_estim() -> None:
    """Test that a not fitted estimator and prefit=True raises an error."""
    # prefit=True promises a fitted estimator; an unfitted one must be rejected.
    unfitted_score = FittedResidualNormalisingScore(
        residual_estimator=LinearRegression(), prefit=True
    )
    with pytest.raises(ValueError):
        unfitted_score.get_conformity_scores(X_toy, y_toy, y_pred_list)


def test_crf_score_prefit_with_default_params() -> None:
    """
    Test that no error is raised with default parameters.

    Computes conformity scores on the toy data, then checks that the
    estimation distribution can be evaluated on a held-out half of it.
    """
    crf_conf_score = FittedResidualNormalisingScore()
    conf_scores = crf_conf_score.get_conformity_scores(
        X_toy, y_toy, y_pred_list
    )
    # Pin the split seed so this smoke test is reproducible across runs
    # (previously the split was drawn from global randomness).
    _, X, _, y = train_test_split(
        X_toy, y_toy, test_size=0.5, random_state=random_state
    )
    crf_conf_score.get_estimation_distribution(X, y, conf_scores)


def test_invalid_estimator() -> None:
    """Test that an estimator without predict method raises an error."""
    # Minimal object lacking the scikit-learn estimator API (no fit/predict).
    class DumbEstimator:
        def __init__(self):
            pass

    crf_conf_score = FittedResidualNormalisingScore(
        residual_estimator=DumbEstimator()
    )
    with pytest.raises(ValueError):
        crf_conf_score.get_conformity_scores(
            X_toy, y_toy, y_pred_list
        )


def test_cross_crf() -> None:
    """Test that crf score called with cross method raises an error."""
    # No cv is given, so MapieRegressor runs its default (non-split) method,
    # which this split-only score must reject at fit time.
    regressor = MapieRegressor(
        conformity_score=FittedResidualNormalisingScore()
    )
    with pytest.raises(ValueError):
        regressor.fit(X_toy, y_toy)


def test_crf_score_pipe() -> None:
    """
    Test that crf score function raises no error with a pipeline estimator.
    """
    residual_pipe = Pipeline([
        ("poly", PolynomialFeatures(degree=2)),
        ("linear", LinearRegression())
    ])
    score = FittedResidualNormalisingScore(
        residual_estimator=residual_pipe, split_size=0.2
    )
    mapie_reg = MapieRegressor(
        conformity_score=score,
        cv="split",
        random_state=random_state
    )
    # Toy data is duplicated — presumably so each internal split gets
    # enough samples; confirm against the score's split requirements.
    doubled_X = np.concatenate((X_toy, X_toy))
    doubled_y = np.concatenate((y_toy, y_toy))
    mapie_reg.fit(doubled_X, doubled_y)


def test_crf_score_pipe_prefit() -> None:
    """
    Test that crf score function raises no error with a pipeline estimator
    prefitted.
    """
    # Pipeline.fit returns the pipeline itself, so fitting can be chained.
    fitted_pipe = Pipeline([
        ("poly", PolynomialFeatures(degree=2)),
        ("linear", LinearRegression())
    ]).fit(X_toy, y_toy)
    regressor = MapieRegressor(
        conformity_score=FittedResidualNormalisingScore(
            residual_estimator=fitted_pipe, split_size=0.2, prefit=True
        ),
        cv="split",
        random_state=random_state
    )
    regressor.fit(X_toy, y_toy)
24 changes: 22 additions & 2 deletions mapie/tests/test_regression.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,10 @@

from mapie._typing import NDArray
from mapie.aggregation_functions import aggregate_all
from mapie.conformity_scores import (AbsoluteConformityScore, ConformityScore,
GammaConformityScore)
from mapie.conformity_scores import (AbsoluteConformityScore,
ConformityScore,
GammaConformityScore,
FittedResidualNormalisingScore)
from mapie.metrics import regression_coverage_score
from mapie.regression import MapieRegressor
from mapie.estimator.estimator import EnsembleRegressor
Expand Down Expand Up @@ -590,6 +592,24 @@ def test_conformity_score(
mapie_reg.predict(X, alpha=0.05)


@pytest.mark.parametrize(
    "conformity_score", [FittedResidualNormalisingScore()]
)
def test_conformity_score_with_split_strategies(
    conformity_score: ConformityScore
) -> None:
    """
    Test that any conformity score function that handles only split
    strategies raises no error with MAPIE.
    """
    mapie_reg = MapieRegressor(
        conformity_score=conformity_score,
        **STRATEGIES["split"]
    )
    # NOTE(review): target shifted by 1e3 — presumably to keep y strictly
    # positive for this score; confirm against the score's requirements.
    mapie_reg.fit(X, y + 1e3)
    mapie_reg.predict(X, alpha=0.05)


@pytest.mark.parametrize("ensemble", [True, False])
def test_return_only_ypred(ensemble: bool) -> None:
"""Test that if return_multi_pred is False it only returns y_pred."""
Expand Down

0 comments on commit b27c567

Please sign in to comment.