Mirror of https://github.com/qurator-spk/eynollah.git, synced 2025-06-10 04:39:54 +02:00
🎨 unnecessary if True
This commit is contained in:
parent 68d5c0d523
commit a65caa4d25

1 changed file with 71 additions and 84 deletions
@@ -171,17 +171,9 @@ class eynollah:
     if img.shape[1] < img_width_model:
         img = cv2.resize(img, (img_height_model, img.shape[0]), interpolation=cv2.INTER_NEAREST)
-    margin = True
-    if margin:
-        kernel = np.ones((5, 5), np.uint8)
     margin = int(0 * img_width_model)
     width_mid = img_width_model - 2 * margin
     height_mid = img_height_model - 2 * margin
     img = img / float(255.0)
     img_h = img.shape[0]
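For context, a minimal sketch of the tile-size arithmetic this hunk keeps (helper name and margin_ratio parameter are illustrative, not the eynollah API): the margin is currently hard-coded to zero, the patch stride is the model input size minus twice the margin, and pixel values are scaled to [0, 1] before prediction.

import numpy as np

def prepare_tiling(img, img_height_model, img_width_model, margin_ratio=0.0):
    """Hypothetical helper: compute patch strides and normalize the image."""
    # margin_ratio=0.0 mirrors `margin = int(0 * img_width_model)` in the hunk above
    margin = int(margin_ratio * img_width_model)
    width_mid = img_width_model - 2 * margin    # horizontal stride between patches
    height_mid = img_height_model - 2 * margin  # vertical stride between patches
    img = np.asarray(img) / float(255.0)        # scale uint8 pixels to [0, 1]
    return img, margin, width_mid, height_mid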
@@ -203,7 +195,6 @@ class eynollah:
     else:
         index_x_d = i * width_mid
         index_x_u = index_x_d + img_width_model
     if j == 0:
         index_y_d = j * height_mid
         index_y_u = index_y_d + img_height_model
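The index arithmetic visible in this hunk maps a tile's grid position (column i, row j) to pixel bounds. A standalone sketch under those assumptions (function name is hypothetical; a complete tiler would additionally clamp the last row and column to the image border, which this sketch omits):

def window_bounds(i, j, width_mid, height_mid, img_width_model, img_height_model):
    """Hypothetical helper: pixel bounds of tile (i, j) on the stride grid."""
    index_x_d = i * width_mid                  # left edge of the tile
    index_x_u = index_x_d + img_width_model    # right edge = left edge + model width
    index_y_d = j * height_mid                 # top edge of the tile
    index_y_u = index_y_d + img_height_model   # bottom edge = top edge + model height
    return index_x_d, index_x_u, index_y_d, index_y_u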
@@ -254,9 +245,6 @@ class eynollah:
     prediction_true = prediction_true.astype(int)
-    del model_enhancement
-    del session_enhancemnet
     return prediction_true

 def calculate_width_height_by_columns(self, img, num_col, width_early, label_p_pred):
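This hunk drops the explicit `del` of the enhancement model and its session before returning. If the intent is to release backend memory between stages, one common pattern (an assumption on my part, not what this commit does; model_path and batch are placeholders) is to clear the Keras backend session after prediction:

from tensorflow.keras import backend as K
from tensorflow.keras.models import load_model

def predict_and_release(model_path, batch):
    """Illustrative pattern: load, predict, then free backend state explicitly."""
    model = load_model(model_path)
    prediction = model.predict(batch)
    del model            # drop the Python reference to the model object
    K.clear_session()    # release the TensorFlow graph/session it was attached to
    return prediction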
@@ -1252,7 +1240,6 @@ class eynollah:
     id_indexer_l = 0
     if len(found_polygons_text_region) > 0:
         self.xml_reading_order(page, order_of_texts, id_of_texts, id_of_marginalia, found_polygons_marginals)
         for mm in range(len(found_polygons_text_region)):
             textregion = ET.SubElement(page, 'TextRegion')
             textregion.set('id', 'r%s' % id_indexer)
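The XML-writing code in these hunks builds PAGE-style elements with xml.etree.ElementTree. A minimal, self-contained sketch of the same pattern (simplified element names, no PAGE namespace handling, fixed example coordinates):

import xml.etree.ElementTree as ET

page = ET.Element('Page')                         # stand-in for the real PAGE element
id_indexer = 1
textregion = ET.SubElement(page, 'TextRegion')
textregion.set('id', 'r%s' % id_indexer)          # same id scheme as in the diff
coord = ET.SubElement(textregion, 'Coords')
coord.set('points', '10,10 200,10 200,50 10,50')  # "x,y" pairs separated by spaces
print(ET.tostring(page, encoding='unicode'))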
@@ -1282,9 +1269,9 @@ class eynollah:
             points_co += ','
             points_co += str(int((all_found_texline_polygons[mm][j][l][1] + page_coord[0]) / self.scale_y))
         else:
-            points_co = points_co + str(int((all_found_texline_polygons[mm][j][l][0][0] + page_coord[2]) / self.scale_x))
-            points_co = points_co + ','
-            points_co = points_co + str(int((all_found_texline_polygons[mm][j][l][0][1] + page_coord[0]) / self.scale_y))
+            points_co += str(int((all_found_texline_polygons[mm][j][l][0][0] + page_coord[2]) / self.scale_x))
+            points_co += ','
+            points_co += str(int((all_found_texline_polygons[mm][j][l][0][1] + page_coord[0]) / self.scale_y))
     elif curved_line and abs(slopes[mm]) > 45:
         if len(all_found_texline_polygons[mm][j][l]) == 2:
             points_co += str(int((all_found_texline_polygons[mm][j][l][0] + all_box_coord[mm][2] + page_coord[2]) / self.scale_x))
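The change here is purely cosmetic: `points_co = points_co + ...` becomes the equivalent augmented assignment `points_co += ...`. The underlying coordinate math (add the page/box offset, divide by the scale factor, join as "x,y" pairs) can also be written with a list and a join; a hedged rewrite with simplified, hypothetical names:

def polygon_to_points(polygon, x_offset, y_offset, scale_x, scale_y):
    """Hypothetical rewrite: map contour points back to original-image coordinates
    and format them as a PAGE-XML 'points' string ("x1,y1 x2,y2 ...")."""
    pairs = []
    for x, y in polygon:
        px = int((x + x_offset) / scale_x)   # add horizontal offset, undo scaling
        py = int((y + y_offset) / scale_y)   # add vertical offset, undo scaling
        pairs.append('%d,%d' % (px, py))
    return ' '.join(pairs)

# Example: a polygon in resized-image coordinates mapped back to the page
print(polygon_to_points([(12, 30), (48, 30), (48, 90)], 100, 50, 0.5, 0.5))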
@@ -1298,7 +1285,6 @@ class eynollah:
             if l < len(all_found_texline_polygons[mm][j]) - 1:
                 points_co += ' '
         coord.set('points', points_co)
         add_textequiv(textregion)
     for mm in range(len(found_polygons_marginals)):
@@ -2002,12 +1988,13 @@ class eynollah:
     text_regions_p = text_regions_p_1[:, :]  # long_short_region[:,:]#self.get_regions_from_2_models(image_page)
     text_regions_p = np.array(text_regions_p)
-    if num_col_classifier == 1 or num_col_classifier == 2:
+    if num_col_classifier in (1, 2):
         try:
             regions_without_seperators = (text_regions_p[:, :] == 1) * 1
             regions_without_seperators = regions_without_seperators.astype(np.uint8)
             text_regions_p = get_marginals(rotate_image(regions_without_seperators, slope_deskew), text_regions_p, num_col_classifier, slope_deskew, kernel=self.kernel)
-        except:
+        except Exception as e:
+            self.logger.error("exception %s", e)
             pass
     if self.plotter:
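This last hunk tightens two idioms: the chained equality check becomes a membership test (`num_col_classifier in (1, 2)`), and the bare `except:` now binds the exception and logs it before falling through. A standalone sketch of that error-handling pattern (logger name, function, and run_marginals callable are illustrative, not the eynollah code):

import logging

logger = logging.getLogger('eynollah')

def detect_marginals_safely(num_col_classifier, run_marginals):
    """Illustrative: only attempt marginal detection for 1- or 2-column layouts,
    and log (rather than silently swallow) any failure."""
    if num_col_classifier in (1, 2):
        try:
            return run_marginals()
        except Exception as e:
            logger.error("exception %s", e)   # same message format as in the diff
    return None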