
Commit

revert: rename metric key to "micro_without_tn/f1"
ArneBinder committed Oct 8, 2024
1 parent bbe282d commit 0c6f9ea
Showing 2 changed files with 16 additions and 16 deletions.
@@ -1107,7 +1107,7 @@ def configure_model_metric(self, stage: str) -> MetricCollection:
                 ),
                 # We can not easily calculate the macro f1 here, because
                 # F1Score with average="macro" would still include the none_label.
-                "micro_without_tn/f1": WrappedMetricWithPrepareFunction(
+                "micro/f1_without_tn": WrappedMetricWithPrepareFunction(
                     metric=F1Score(average="micro", **common_metric_kwargs),
                     prepare_together_function=partial(
                         _get_labels_together_remove_none_label,
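For context, here is a minimal sketch (not part of this commit) of what the renamed key controls: in a torchmetrics MetricCollection, the dict key under which a metric is registered becomes the name it is reported under, so this change only affects the reported key ("micro_without_tn/f1" vs. "micro/f1_without_tn"), not the computed score. The num_classes=4 and the example tensors are illustrative assumptions; the taskmodule's WrappedMetricWithPrepareFunction and its none-label filtering are intentionally not reproduced.

```python
# Sketch only: shows that the MetricCollection key is purely a reporting name.
# num_classes=4 mirrors the four labels used in the test below (an assumption);
# the none-label filtering done by WrappedMetricWithPrepareFunction is omitted.
import torch
from torchmetrics import F1Score, MetricCollection

metrics = MetricCollection(
    {
        "micro/f1_without_tn": F1Score(task="multiclass", num_classes=4, average="micro"),
    }
)

preds = torch.tensor([1, 2, 2, 3])
targets = torch.tensor([1, 2, 3, 3])
metrics.update(preds, targets)
print(metrics.compute())  # {'micro/f1_without_tn': tensor(0.7500)}
```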
30 changes: 15 additions & 15 deletions tests/taskmodules/test_re_text_classification_with_indices.py
@@ -1717,10 +1717,10 @@ def test_configure_model_metric(documents, taskmodule):
     assert isinstance(metric, (Metric, MetricCollection))
     state = get_metric_state(metric)
     assert state == {
-        "micro_without_tn/f1/tp": [0],
-        "micro_without_tn/f1/fp": [0],
-        "micro_without_tn/f1/tn": [0],
-        "micro_without_tn/f1/fn": [0],
+        "micro/f1_without_tn/tp": [0],
+        "micro/f1_without_tn/fp": [0],
+        "micro/f1_without_tn/tn": [0],
+        "micro/f1_without_tn/fn": [0],
         "with_tn/f1_per_label/tp": [0, 0, 0, 0],
         "with_tn/f1_per_label/fp": [0, 0, 0, 0],
         "with_tn/f1_per_label/tn": [0, 0, 0, 0],
@@ -1741,17 +1741,17 @@ def test_configure_model_metric(documents, taskmodule):
         "per:founder/f1": tensor(0.0),
         "macro/f1": tensor(0.0),
         "micro/f1": tensor(0.0),
-        "micro_without_tn/f1": tensor(0.0),
+        "micro/f1_without_tn": tensor(0.0),
     }

     targets = batch[1]
     metric.update(targets, targets)
     state = get_metric_state(metric)
     assert state == {
-        "micro_without_tn/f1/tp": [7],
-        "micro_without_tn/f1/fp": [0],
-        "micro_without_tn/f1/tn": [21],
-        "micro_without_tn/f1/fn": [0],
+        "micro/f1_without_tn/tp": [7],
+        "micro/f1_without_tn/fp": [0],
+        "micro/f1_without_tn/tn": [21],
+        "micro/f1_without_tn/fn": [0],
         "with_tn/f1_per_label/tp": [0, 2, 3, 2],
         "with_tn/f1_per_label/fp": [0, 0, 0, 0],
         "with_tn/f1_per_label/tn": [7, 5, 4, 5],
@@ -1772,7 +1772,7 @@ def test_configure_model_metric(documents, taskmodule):
         "per:founder/f1": tensor(1.0),
         "macro/f1": tensor(1.0),
         "micro/f1": tensor(1.0),
-        "micro_without_tn/f1": tensor(1.0),
+        "micro/f1_without_tn": tensor(1.0),
     }

     metric.reset()
@@ -1782,10 +1782,10 @@ def test_configure_model_metric(documents, taskmodule):
     metric.update(random_predictions, modified_targets)
     state = get_metric_state(metric)
     assert state == {
-        "micro_without_tn/f1/tp": [3],
-        "micro_without_tn/f1/fp": [3],
-        "micro_without_tn/f1/tn": [15],
-        "micro_without_tn/f1/fn": [3],
+        "micro/f1_without_tn/tp": [3],
+        "micro/f1_without_tn/fp": [3],
+        "micro/f1_without_tn/tn": [15],
+        "micro/f1_without_tn/fn": [3],
         "with_tn/f1_per_label/tp": [1, 1, 1, 1],
         "with_tn/f1_per_label/fp": [1, 2, 0, 0],
         "with_tn/f1_per_label/tn": [5, 3, 4, 6],
@@ -1809,7 +1809,7 @@ def test_configure_model_metric(documents, taskmodule):
             "per:founder/f1": tensor(1.0),
             "macro/f1": tensor(0.641667),
             "micro/f1": tensor(0.571429),
-            "micro_without_tn/f1": tensor(0.500000),
+            "micro/f1_without_tn": tensor(0.500000),
         },
     )

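As a quick sanity check on the values in the last hunk (a sketch, not part of the commit, assuming the usual micro F1 definition 2*tp / (2*tp + fp + fn), which ignores true negatives):

```python
# Sanity check (not part of this commit): reproduce the expected micro F1 values
# in the last hunk from the accumulated counts shown in the test state above.

def micro_f1(tp: int, fp: int, fn: int) -> float:
    # Micro F1 ignores true negatives by construction.
    return 2 * tp / (2 * tp + fp + fn)

# "micro/f1_without_tn": state above has tp=3, fp=3, fn=3.
assert round(micro_f1(tp=3, fp=3, fn=3), 6) == 0.5  # matches tensor(0.500000)

# "micro/f1": summing the per-label counts gives tp=4 and fp=3; for multiclass
# micro averaging the fp and fn totals coincide, so fn=3 and f1 = 4/7.
assert round(micro_f1(tp=4, fp=3, fn=3), 6) == round(4 / 7, 6)  # ~= 0.571429
```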
