commit b1c109baae
@@ -0,0 +1,5 @@
+src/dinglehopper/tests
+dist
+build
+*.egg-info
+.git
@@ -1,83 +1,76 @@
-import json
+from functools import cached_property
 import os
+from typing import Optional

 import click
-import importlib_resources
+from ocrd_models import OcrdFileType
 from ocrd import Processor
 from ocrd.decorators import ocrd_cli_options, ocrd_cli_wrap_processor
-from ocrd_utils import assert_file_grp_cardinality, getLogger, make_file_id
+from ocrd_utils import make_file_id

 from .cli import process as cli_process

-OCRD_TOOL = json.loads(
-    importlib_resources.files(__name__)
-    .joinpath("ocrd-tool.json")
-    .read_text(encoding="utf-8", errors="strict")
-)
-

 @click.command()
 @ocrd_cli_options
 def ocrd_dinglehopper(*args, **kwargs):
     return ocrd_cli_wrap_processor(OcrdDinglehopperEvaluate, *args, **kwargs)


 class OcrdDinglehopperEvaluate(Processor):
-    def __init__(self, *args, **kwargs):
-        kwargs["ocrd_tool"] = OCRD_TOOL["tools"]["ocrd-dinglehopper"]
-        kwargs["version"] = OCRD_TOOL["version"]
-        super(OcrdDinglehopperEvaluate, self).__init__(*args, **kwargs)
-
-    def process(self):
-        assert_file_grp_cardinality(self.input_file_grp, 2, "GT and OCR")
-        assert_file_grp_cardinality(self.output_file_grp, 1)
+    @cached_property
+    def executable(self):
+        return 'ocrd-dinglehopper'

-        log = getLogger("processor.OcrdDinglehopperEvaluate")
+    def process_page_file(self, *input_files: Optional[OcrdFileType]) -> None:

+        assert self.parameter
         metrics = self.parameter["metrics"]
         textequiv_level = self.parameter["textequiv_level"]
-        gt_grp, ocr_grp = self.input_file_grp.split(",")

-        input_file_tuples = self.zip_input_files(on_error="abort")
-        for n, (gt_file, ocr_file) in enumerate(input_file_tuples):
-            if not gt_file or not ocr_file:
-                # file/page was not found in this group
-                continue
-            gt_file = self.workspace.download_file(gt_file)
-            ocr_file = self.workspace.download_file(ocr_file)
-            page_id = gt_file.pageId
-
-            log.info("INPUT FILES %i / %s↔ %s", n, gt_file, ocr_file)
-
-            file_id = make_file_id(ocr_file, self.output_file_grp)
-            report_prefix = os.path.join(self.output_file_grp, file_id)
-
-            # Process the files
-            try:
-                os.mkdir(self.output_file_grp)
-            except FileExistsError:
-                pass
-            cli_process(
-                gt_file.local_filename,
-                ocr_file.local_filename,
-                report_prefix,
-                metrics=metrics,
-                textequiv_level=textequiv_level,
+        # wrong number of inputs: let fail
+        gt_file, ocr_file = input_files
+        # missing on either side: skip (zip_input_files already warned)
+        if not gt_file or not ocr_file:
+            return
+        # missing download (i.e. OCRD_DOWNLOAD_INPUT=false):
+        if not gt_file.local_filename:
+            if config.OCRD_MISSING_INPUT == 'ABORT':
+                raise MissingInputFile(gt_file.fileGrp, gt_file.pageId, gt_file.mimetype)
+            return
+        if not ocr_file.local_filename:
+            if config.OCRD_MISSING_INPUT == 'ABORT':
+                raise MissingInputFile(ocr_file.fileGrp, ocr_file.pageId, ocr_file.mimetype)
+            return
+
+        page_id = gt_file.pageId
+
+        file_id = make_file_id(ocr_file, self.output_file_grp)
+        cli_process(
+            gt_file.local_filename,
+            ocr_file.local_filename,
+            file_id,
+            self.output_file_grp,
+            metrics=metrics,
+            textequiv_level=textequiv_level,
         )

+        # Add reports to the workspace
+        for report_suffix, mimetype in [
+            [".html", "text/html"],
+            [".json", "application/json"],
+        ]:
+            output_file_id = file_id + report_suffix
+            output_file = next(self.workspace.mets.find_files(ID=output_file_id), None)
+            if output_file and config.OCRD_EXISTING_OUTPUT != 'OVERWRITE':
+                raise FileExistsError(f"A file with ID=={output_file_id} already exists {output_file} and neither force nor ignore are set")
+            self.workspace.add_file(
+                file_id=output_file_id,
+                file_grp=self.output_file_grp,
+                page_id=page_id,
+                mimetype=mimetype,
+                local_filename=file_id + report_suffix,
+            )

-            # Add reports to the workspace
-            for report_suffix, mimetype in [
-                [".html", "text/html"],
-                [".json", "application/json"],
-            ]:
-                self.workspace.add_file(
-                    file_id=file_id + report_suffix,
-                    file_grp=self.output_file_grp,
-                    page_id=page_id,
-                    mimetype=mimetype,
-                    local_filename=report_prefix + report_suffix,
-                )


 if __name__ == "__main__":
     ocrd_dinglehopper()
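For context on the new call signature used above: process_page_file hands one GT file and one OCR file per page to dinglehopper's CLI-level process function, passing the report prefix and the reports folder as separate arguments instead of joining them with os.path.join and creating the directory itself, as the old process() loop did. Below is a minimal sketch of an equivalent direct call, assuming the function is importable as dinglehopper.cli.process (the relative import in the diff suggests this); all paths, IDs and parameter values are made-up illustrations, not values from this commit.

# Per-page evaluation as the new process_page_file() performs it;
# every concrete path/ID below is a hypothetical example.
from dinglehopper.cli import process as cli_process

cli_process(
    "OCR-D-GT/page_0001.xml",     # gt_file.local_filename
    "OCR-D-OCR/page_0001.xml",    # ocr_file.local_filename
    "OCR-D-EVAL_page_0001",       # report prefix (file_id in the diff)
    "OCR-D-EVAL",                 # reports folder (self.output_file_grp in the diff)
    metrics=True,                 # parameter "metrics"
    textequiv_level="region",     # parameter "textequiv_level"
)
# dinglehopper should then write the .html and .json reports for this prefix,
# which process_page_file() registers in the METS via workspace.add_file().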