Compare commits
No commits in common. 'master' and 'v0.9.7' have entirely different histories.
@@ -1,5 +0,0 @@
-src/dinglehopper/tests
-dist
-build
-*.egg-info
-.git

@@ -1,33 +0,0 @@
-PYTHON = python3
-PIP = pip3
-PYTHONIOENCODING=utf8
-PYTEST_ARGS = -vv
-
-DOCKER_BASE_IMAGE = docker.io/ocrd/core:v3.3.0
-DOCKER_TAG = ocrd/dinglehopper
-
-help:
-	@echo
-	@echo "  Targets"
-	@echo
-	@echo "    install      Install full Python package via pip"
-	@echo "    docker       Build the ocrd/dinglehopper docker image"
-
-# Install Python package via pip
-install:
-	$(PIP) install .
-
-install-dev:
-	$(PIP) install -e .
-
-test:
-	pytest $(PYTEST_ARGS)
-
-docker:
-	docker build \
-	--build-arg DOCKER_BASE_IMAGE=$(DOCKER_BASE_IMAGE) \
-	--build-arg VCS_REF=$$(git rev-parse --short HEAD) \
-	--build-arg BUILD_DATE=$$(date -u +"%Y-%m-%dT%H:%M:%SZ") \
-	-t $(DOCKER_TAG) .
-
-.PHONY: help install install-dev test docker

@@ -1,78 +1,82 @@
-from functools import cached_property
+import json
 import os
-from typing import Optional

 import click
-from ocrd_models import OcrdFileType
+import importlib_resources
 from ocrd import Processor
 from ocrd.decorators import ocrd_cli_options, ocrd_cli_wrap_processor
-from ocrd_utils import make_file_id
+from ocrd_utils import assert_file_grp_cardinality, getLogger, make_file_id

 from .cli import process as cli_process

+OCRD_TOOL = json.loads(
+    importlib_resources.files(__name__)
+    .joinpath("ocrd-tool.json")
+    .read_text(encoding="utf-8", errors="strict")
+)

+
 @click.command()
 @ocrd_cli_options
 def ocrd_dinglehopper(*args, **kwargs):
     return ocrd_cli_wrap_processor(OcrdDinglehopperEvaluate, *args, **kwargs)


 class OcrdDinglehopperEvaluate(Processor):
+    def __init__(self, *args, **kwargs):
+        kwargs["ocrd_tool"] = OCRD_TOOL["tools"]["ocrd-dinglehopper"]
+        super(OcrdDinglehopperEvaluate, self).__init__(*args, **kwargs)

-    @cached_property
-    def executable(self):
-        return 'ocrd-dinglehopper'
+    def process(self):
+        assert_file_grp_cardinality(self.input_file_grp, 2, "GT and OCR")
+        assert_file_grp_cardinality(self.output_file_grp, 1)

-    def process_page_file(self, *input_files: Optional[OcrdFileType]) -> None:
+        log = getLogger("processor.OcrdDinglehopperEvaluate")

-        assert self.parameter
         metrics = self.parameter["metrics"]
         textequiv_level = self.parameter["textequiv_level"]
-        plain_encoding = self.parameter["plain_encoding"]
+        gt_grp, ocr_grp = self.input_file_grp.split(",")

-        # wrong number of inputs: let fail
-        gt_file, ocr_file = input_files
-        # missing on either side: skip (zip_input_files already warned)
-        if not gt_file or not ocr_file:
-            return
-        # missing download (i.e. OCRD_DOWNLOAD_INPUT=false):
-        if not gt_file.local_filename:
-            if config.OCRD_MISSING_INPUT == 'ABORT':
-                raise MissingInputFile(gt_file.fileGrp, gt_file.pageId, gt_file.mimetype)
-            return
-        if not ocr_file.local_filename:
-            if config.OCRD_MISSING_INPUT == 'ABORT':
-                raise MissingInputFile(ocr_file.fileGrp, ocr_file.pageId, ocr_file.mimetype)
-            return
-        page_id = gt_file.pageId
-        file_id = make_file_id(ocr_file, self.output_file_grp)
-        cli_process(
-            gt_file.local_filename,
-            ocr_file.local_filename,
-            file_id,
-            self.output_file_grp,
-            metrics=metrics,
-            textequiv_level=textequiv_level,
-            plain_encoding=plain_encoding,
-        )
-
-        # Add reports to the workspace
-        for report_suffix, mimetype in [
-            [".html", "text/html"],
-            [".json", "application/json"],
-        ]:
-            output_file_id = file_id + report_suffix
-            output_file = next(self.workspace.mets.find_files(ID=output_file_id), None)
-            if output_file and config.OCRD_EXISTING_OUTPUT != 'OVERWRITE':
-                raise FileExistsError(f"A file with ID=={output_file_id} already exists {output_file} and neither force nor ignore are set")
-            self.workspace.add_file(
-                file_id=output_file_id,
-                file_grp=self.output_file_grp,
-                page_id=page_id,
-                mimetype=mimetype,
-                local_filename=file_id + report_suffix,
+        input_file_tuples = self.zip_input_files(on_error="abort")
+        for n, (gt_file, ocr_file) in enumerate(input_file_tuples):
+            if not gt_file or not ocr_file:
+                # file/page was not found in this group
+                continue
+            gt_file = self.workspace.download_file(gt_file)
+            ocr_file = self.workspace.download_file(ocr_file)
+            page_id = gt_file.pageId
+
+            log.info("INPUT FILES %i / %s↔ %s", n, gt_file, ocr_file)
+
+            file_id = make_file_id(ocr_file, self.output_file_grp)
+            report_prefix = os.path.join(self.output_file_grp, file_id)
+
+            # Process the files
+            try:
+                os.mkdir(self.output_file_grp)
+            except FileExistsError:
+                pass
+            cli_process(
+                gt_file.local_filename,
+                ocr_file.local_filename,
+                report_prefix,
+                metrics=metrics,
+                textequiv_level=textequiv_level,
             )
+
+            # Add reports to the workspace
+            for report_suffix, mimetype in [
+                [".html", "text/html"],
+                [".json", "application/json"],
+            ]:
+                self.workspace.add_file(
+                    file_id=file_id + report_suffix,
+                    file_grp=self.output_file_grp,
+                    page_id=page_id,
+                    mimetype=mimetype,
+                    local_filename=report_prefix + report_suffix,
+                )


 if __name__ == "__main__":
     ocrd_dinglehopper()

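
Read as a whole, the hunk above switches between two OCR-D processor styles: the 'master' side declares its executable as a cached property and implements process_page_file(), which the framework calls once per page with an already paired GT/OCR file tuple, while the 'v0.9.7' side loads ocrd-tool.json itself and implements a single process() method that loops over self.zip_input_files(). The sketch below only illustrates these two calling conventions; OldStyleProcessor and NewStyleProcessor are hypothetical stand-ins, not the real ocrd.Processor base class or its workspace machinery.

# Minimal sketch of the two calling conventions visible in the hunk above.
# OldStyleProcessor / NewStyleProcessor are hypothetical stand-ins only.
from typing import List, Optional, Tuple


class OldStyleProcessor:
    """v0.9.7 side: a single process() call loops over all page pairs itself."""

    def __init__(self, file_pairs: List[Tuple[Optional[str], Optional[str]]]):
        self.file_pairs = file_pairs

    def process(self) -> None:
        for n, (gt_file, ocr_file) in enumerate(self.file_pairs):
            if not gt_file or not ocr_file:
                # file/page was not found in this group
                continue
            print(f"page {n}: comparing {gt_file} with {ocr_file}")


class NewStyleProcessor:
    """master side: the framework calls process_page_file() once per page."""

    executable = "ocrd-dinglehopper"

    def process_page_file(self, *input_files: Optional[str]) -> None:
        gt_file, ocr_file = input_files
        if not gt_file or not ocr_file:
            return  # missing on either side: skip this page
        print(f"comparing {gt_file} with {ocr_file}")


page_pairs = [("gt/page1.xml", "ocr/page1.xml"), ("gt/page2.xml", None)]
OldStyleProcessor(page_pairs).process()
new_style = NewStyleProcessor()
for gt, ocr in page_pairs:
    new_style.process_page_file(gt, ocr)

Everything specific to the workspace (downloading inputs, building the report prefix, adding the HTML/JSON reports to the METS) stays as shown in the diff itself.
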
@@ -1 +0,0 @@
-This is a test.
@@ -1 +0,0 @@
-Another test.
@@ -1 +0,0 @@
-Tis is a test.
@@ -1 +0,0 @@
-AnÖther test.
@@ -1 +0,0 @@
-This is a test.
@@ -1 +0,0 @@
-Tis is a test.
@@ -1 +0,0 @@
-Another test.
@@ -1 +0,0 @@
-AnÖther test.
@@ -1 +0,0 @@
-This is a test.
@@ -1 +0,0 @@
-Another test.
@@ -1 +0,0 @@
-Tis is a test.
@@ -1 +0,0 @@
-AnÖther test.

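
The twelve removed fixtures above form GT/OCR line pairs ('This is a test.' vs 'Tis is a test.', 'Another test.' vs 'AnÖther test.'); they are the data behind the line-dirs tests removed further down. As a rough illustration of how such pairs feed a character error rate, here is a self-contained sketch built on a plain Levenshtein distance. It is not dinglehopper's implementation (which, among other things, normalizes Unicode and works on grapheme clusters), so the exact CER and WER values asserted in the removed tests are not expected to come out of this toy.

# Toy CER illustration for the fixture pairs above. This is NOT dinglehopper's
# implementation; it ignores Unicode normalization and grapheme clusters.
def levenshtein(a, b):
    # Classic dynamic-programming edit distance between two sequences.
    prev = list(range(len(b) + 1))
    for i, item_a in enumerate(a, 1):
        cur = [i]
        for j, item_b in enumerate(b, 1):
            cur.append(min(prev[j] + 1, cur[j - 1] + 1, prev[j - 1] + (item_a != item_b)))
        prev = cur
    return prev[-1]


gt_lines = ["This is a test.", "Another test."]
ocr_lines = ["Tis is a test.", "AnÖther test."]

char_edits = sum(levenshtein(gt, ocr) for gt, ocr in zip(gt_lines, ocr_lines))
cer = char_edits / sum(len(gt) for gt in gt_lines)
print(f"toy CER = {cer:.4f}")  # number of edit operations divided by GT length

A word-level analogue follows the same pattern with gt.split() and ocr.split() as the sequences being compared.
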
@@ -1,61 +0,0 @@
-import json
-import os.path
-import re
-
-import pytest
-
-from ..cli_line_dirs import process
-from .util import working_directory
-
-data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
-
-
-@pytest.mark.integration
-def test_cli_line_dirs_basic(tmp_path):
-    """Test that the cli/process() produces a good report"""
-
-    with working_directory(tmp_path):
-        gt_dir = os.path.join(data_dir, "line_dirs/basic/gt")
-        ocr_dir = os.path.join(data_dir, "line_dirs/basic/ocr")
-        process(gt_dir, ocr_dir, "report")
-        with open("report.json", "r") as jsonf:
-            print(jsonf.read())
-        with open("report.json", "r") as jsonf:
-            j = json.load(jsonf)
-            assert j["cer"] == pytest.approx(0.1071429)
-            assert j["wer"] == pytest.approx(0.5)
-
-
-@pytest.mark.integration
-def test_cli_line_dirs_basic_report_diff(tmp_path):
-    """Test that the cli/process() produces a report wiff char+word diff"""
-
-    with working_directory(tmp_path):
-        gt_dir = os.path.join(data_dir, "line_dirs/basic/gt")
-        ocr_dir = os.path.join(data_dir, "line_dirs/basic/ocr")
-        process(gt_dir, ocr_dir, "report")
-
-        with open("report.html", "r") as htmlf:
-            html_report = htmlf.read()
-
-        # Counting GT lines in the diff
-        assert len(re.findall(r"gt.*l\d+-cdiff", html_report)) == 2
-        assert len(re.findall(r"gt.*l\d+-wdiff", html_report)) == 2
-
-
-@pytest.mark.integration
-def test_cli_line_dirs_merged(tmp_path):
-    """Test that the cli/process() produces a good report"""
-
-    with working_directory(tmp_path):
-        gt_dir = os.path.join(data_dir, "line_dirs/merged")
-        ocr_dir = os.path.join(data_dir, "line_dirs/merged")
-        process(
-            gt_dir, ocr_dir, "report", gt_suffix=".gt.txt", ocr_suffix=".some-ocr.txt"
-        )
-        with open("report.json", "r") as jsonf:
-            print(jsonf.read())
-        with open("report.json", "r") as jsonf:
-            j = json.load(jsonf)
-            assert j["cer"] == pytest.approx(0.1071429)
-            assert j["wer"] == pytest.approx(0.5)

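
The integration tests above import a working_directory helper from .util that this compare view does not show. A typical shape for such a helper, given here only as an assumption about what it most likely does (temporarily switching the current working directory), is:

# Hypothetical sketch of the working_directory helper used above; the real
# .util implementation is not part of this diff and may differ.
import contextlib
import os


@contextlib.contextmanager
def working_directory(path):
    """Temporarily change the current working directory to `path`."""
    old_cwd = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(old_cwd)

Used as "with working_directory(tmp_path): ...", it keeps the report.json and report.html files written by process() inside pytest's temporary directory.
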
@@ -1,71 +0,0 @@
-import os
-
-from ..cli_line_dirs import find_gt_and_ocr_files, find_gt_and_ocr_files_autodetect
-
-data_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "data")
-
-
-def test_basic():
-    """Test the dumb method: User gives directories and suffixes."""
-    pairs = list(
-        find_gt_and_ocr_files(
-            os.path.join(data_dir, "line_dirs/basic/gt"),
-            ".gt.txt",
-            os.path.join(data_dir, "line_dirs/basic/ocr"),
-            ".some-ocr.txt",
-        )
-    )
-
-    assert len(pairs) == 2
-
-
-def test_basic_autodetect():
-    """Test autodetect: User gives directories, suffixes are autodetected if possible"""
-    pairs = list(
-        find_gt_and_ocr_files_autodetect(
-            os.path.join(data_dir, "line_dirs/basic/gt"),
-            os.path.join(data_dir, "line_dirs/basic/ocr"),
-        )
-    )
-
-    assert len(pairs) == 2
-
-
-def test_subdirs():
-    """Test the dumb method: Should also work when subdirectories are involved."""
-    pairs = list(
-        find_gt_and_ocr_files(
-            os.path.join(data_dir, "line_dirs/subdirs/gt"),
-            ".gt.txt",
-            os.path.join(data_dir, "line_dirs/subdirs/ocr"),
-            ".some-ocr.txt",
-        )
-    )
-
-    assert len(pairs) == 2
-
-
-def test_subdirs_autodetect():
-    """Test the autodetect method: Should also work when subdirectories are involved."""
-    pairs = list(
-        find_gt_and_ocr_files_autodetect(
-            os.path.join(data_dir, "line_dirs/subdirs/gt"),
-            os.path.join(data_dir, "line_dirs/subdirs/ocr"),
-        )
-    )
-
-    assert len(pairs) == 2
-
-
-def test_merged():
-    """Test the dumb method: GT and OCR texts are in the same directories."""
-    pairs = list(
-        find_gt_and_ocr_files(
-            os.path.join(data_dir, "line_dirs/merged"),
-            ".gt.txt",
-            os.path.join(data_dir, "line_dirs/merged"),
-            ".some-ocr.txt",
-        )
-    )
-
-    assert len(pairs) == 2

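
The removed unit tests above only pin down the pairing behaviour: two GT/OCR pairs in the basic, subdirs and merged layouts, with and without explicit suffixes. The pairing code itself (find_gt_and_ocr_files and its autodetect variant in cli_line_dirs) is not part of this compare view. A suffix-based pairing sketch that would satisfy these expectations, offered as an assumption rather than the removed implementation, could look like this:

# Hypothetical sketch of suffix-based GT/OCR pairing; the removed
# cli_line_dirs.find_gt_and_ocr_files may be implemented differently.
import os


def pair_gt_and_ocr(gt_dir, gt_suffix, ocr_dir, ocr_suffix):
    """Yield (gt_path, ocr_path) pairs whose file names share a common stem."""
    for root, _dirs, files in os.walk(gt_dir):
        for name in files:
            if not name.endswith(gt_suffix):
                continue
            stem = name[: -len(gt_suffix)]
            rel = os.path.relpath(root, gt_dir)
            ocr_path = os.path.join(ocr_dir, rel, stem + ocr_suffix)
            if os.path.exists(ocr_path):
                yield os.path.join(root, name), ocr_path

Because the GT and OCR directories may be the same (the merged layout) and files may live in subdirectories, the sketch walks the GT tree and looks up each OCR counterpart by relative path and suffix.
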