Merge pull request #261 from VikParuchuri/dev
Integrate new OCR
Showing 24 changed files with 1,653 additions and 88 deletions.
File renamed without changes.
@@ -0,0 +1,77 @@
import argparse
import json

import datasets
from surya.schema import LayoutResult, LayoutBox
from tqdm import tqdm

from marker.benchmark.table import score_table
from marker.schema.bbox import rescale_bbox
from marker.schema.page import Page
from marker.tables.table import format_tables


def main():
    parser = argparse.ArgumentParser(description="Benchmark table conversion.")
    parser.add_argument("out_file", help="Output filename for results")
    parser.add_argument("--dataset", type=str, help="Dataset to use", default="vikp/table_bench")
    args = parser.parse_args()

    ds = datasets.load_dataset(args.dataset, split="train")

    results = []
    for i in tqdm(range(len(ds)), desc="Evaluating tables"):
        row = ds[i]
        marker_page = Page(**json.loads(row["marker_page"]))
        table_bbox = row["table_bbox"]
        gpt4_table = json.loads(row["gpt_4_table"])["markdown_table"]

        # Counterclockwise polygon from top left
        table_poly = [
            [table_bbox[0], table_bbox[1]],
            [table_bbox[2], table_bbox[1]],
            [table_bbox[2], table_bbox[3]],
            [table_bbox[0], table_bbox[3]],
        ]

        # Remove all other tables from the layout results
        layout_result = LayoutResult(
            bboxes=[
                LayoutBox(
                    label="Table",
                    polygon=table_poly
                )
            ],
            segmentation_map="",
            image_bbox=marker_page.text_lines.image_bbox
        )

        marker_page.layout = layout_result
        format_tables([marker_page])

        table_blocks = [block for block in marker_page.blocks if block.block_type == "Table"]
        if len(table_blocks) != 1:
            continue

        table_block = table_blocks[0]
        table_md = table_block.lines[0].spans[0].text

        results.append({
            "score": score_table(table_md, gpt4_table),
            "arxiv_id": row["arxiv_id"],
            "page_idx": row["page_idx"],
            "marker_table": table_md,
            "gpt4_table": gpt4_table,
            "table_bbox": table_bbox
        })

    avg_score = sum([r["score"] for r in results]) / len(results)
    print(f"Evaluated {len(results)} tables, average score is {avg_score}.")

    with open(args.out_file, "w+") as f:
        json.dump(results, f, indent=2)


if __name__ == "__main__":
    main()
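After a run, the output file holds one JSON record per evaluated table: the alignment score, the source arxiv id and page index, both markdown tables, and the table bounding box. A minimal sketch for inspecting the lowest-scoring conversions, assuming the benchmark was run with out_file set to a hypothetical table_results.json:

import json

# Load the results written by the benchmark script above
# (the filename is whatever was passed as out_file).
with open("table_results.json") as f:
    results = json.load(f)

# Sort ascending by score to surface the worst table conversions first.
for r in sorted(results, key=lambda r: r["score"])[:5]:
    print(r["arxiv_id"], r["page_idx"], round(r["score"], 3))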
@@ -0,0 +1,41 @@
from rapidfuzz import fuzz
import re


def split_to_cells(table):
    table = table.strip()
    table = re.sub(r" {2,}", "", table)
    table_rows = table.split("\n")
    table_rows = [t for t in table_rows if t.strip()]
    table_cells = [r.split("|") for r in table_rows]
    return table_cells


def align_rows(hypothesis, ref_row):
    best_alignment = []
    best_alignment_score = 0
    for j in range(0, len(hypothesis)):
        alignments = []
        for i in range(len(ref_row)):
            if i >= len(hypothesis[j]):
                alignments.append(0)
                continue
            alignment = fuzz.ratio(hypothesis[j][i], ref_row[i], score_cutoff=30) / 100
            alignments.append(alignment)
        if len(alignments) == 0:
            continue
        alignment_score = sum(alignments) / len(alignments)
        if alignment_score >= best_alignment_score:
            best_alignment = alignments
            best_alignment_score = alignment_score
    return best_alignment


def score_table(hypothesis, reference):
    hypothesis = split_to_cells(hypothesis)
    reference = split_to_cells(reference)

    alignments = []
    for i in range(0, len(reference)):
        alignments.extend(align_rows(hypothesis, reference[i]))
    return sum(alignments) / len(alignments)
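score_table splits both markdown tables into pipe-delimited cells, matches each reference row against its best-aligned hypothesis row with rapidfuzz, and averages the per-cell similarities into a score between 0 and 1. A small illustrative call, assuming the marker package (and its rapidfuzz dependency) is installed; the tables themselves are made up:

from marker.benchmark.table import score_table

# Hypothetical reference table and a slightly corrupted hypothesis.
reference = """
| Name | Value |
| Alpha | 1.0 |
| Beta | 2.0 |
"""

hypothesis = """
| Name | Value |
| Alpha | 1.0 |
| Betta | 2.1 |
"""

# Identical tables score 1.0; cell-level differences lower the average.
print(score_table(hypothesis, reference))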