diff --git a/src/pie_modules/document/processing/tokenization.py b/src/pie_modules/document/processing/tokenization.py
index e1fefc944..a9e83564e 100644
--- a/src/pie_modules/document/processing/tokenization.py
+++ b/src/pie_modules/document/processing/tokenization.py
@@ -257,7 +257,9 @@ def char_to_token(char_idx: int) -> Optional[int]:
     )
     if added_annotations is not None:
         for layer_name, orig_ann_id2new_ann in added_annotations_from_remaining_layers.items():
-            ann_id2ann = {ann._id: ann for ann in doc[layer_name]}
+            ann_id2ann = {
+                ann._id: ann for ann in list(doc[layer_name]) + list(doc[layer_name].predictions)
+            }
             annotation_mapping = {
                 ann_id2ann[orig_ann_id]: new_ann
                 for orig_ann_id, new_ann in orig_ann_id2new_ann.items()
@@ -385,7 +387,9 @@ def token_based_document_to_text_based(
     )
     if added_annotations is not None:
         for layer_name, orig_ann_id2new_ann in added_annotations_from_remaining_layers.items():
-            ann_id2ann = {ann._id: ann for ann in doc[layer_name]}
+            ann_id2ann = {
+                ann._id: ann for ann in list(doc[layer_name]) + list(doc[layer_name].predictions)
+            }
             annotation_mapping = {
                 ann_id2ann[orig_ann_id]: new_ann
                 for orig_ann_id, new_ann in orig_ann_id2new_ann.items()