diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..24027a2 Binary files /dev/null and b/.DS_Store differ diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..4cc9a01 --- /dev/null +++ b/.gitignore @@ -0,0 +1,165 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
+# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/ + +.vscode/ +images/ +models/ +logs/ \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..1c50bb3 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,6 @@ +torch +torchvision +torchaudio +Pillow +opencv-python==4.5.1.48 +tqdm diff --git a/result/.DS_Store b/result/.DS_Store new file mode 100644 index 0000000..02455de Binary files /dev/null and b/result/.DS_Store differ diff --git a/result/1AD7899.jpg b/result/1AD7899.jpg new file mode 100644 index 0000000..2e17755 Binary files /dev/null and b/result/1AD7899.jpg differ diff --git a/result/8273BXG7D.jpg b/result/8273BXG7D.jpg new file mode 100644 index 0000000..1d1e203 Binary files /dev/null and b/result/8273BXG7D.jpg differ diff --git a/result/A1214YZ.jpg b/result/A1214YZ.jpg new file mode 100644 index 0000000..07520d2 Binary files /dev/null and b/result/A1214YZ.jpg differ diff --git a/result/A1227BJ.jpg b/result/A1227BJ.jpg new file mode 100644 index 0000000..9503ecd Binary files /dev/null and b/result/A1227BJ.jpg differ diff --git a/result/A1251RP.jpg b/result/A1251RP.jpg new file mode 100644 index 0000000..1ce832f Binary files /dev/null and b/result/A1251RP.jpg differ diff --git a/result/A1260YD.jpg b/result/A1260YD.jpg new file mode 100644 index 0000000..137c90f Binary files /dev/null and b/result/A1260YD.jpg differ diff --git a/result/A1758BO.jpg b/result/A1758BO.jpg new file mode 100644 index 0000000..d5aa2e6 Binary files /dev/null and b/result/A1758BO.jpg differ diff --git a/result/A1788WC.jpg b/result/A1788WC.jpg new file mode 100644 index 0000000..5707b8d Binary files /dev/null and b/result/A1788WC.jpg differ diff --git a/result/A685PJ.jpg b/result/A685PJ.jpg new file mode 100644 index 0000000..1769f97 Binary files /dev/null and b/result/A685PJ.jpg differ diff --git a/result/A907R.jpg b/result/A907R.jpg new file mode 100644 index 0000000..2a20d25 Binary files /dev/null and b/result/A907R.jpg differ diff --git a/result/AB1092TY.jpg 
b/result/AB1092TY.jpg new file mode 100644 index 0000000..8dce388 Binary files /dev/null and b/result/AB1092TY.jpg differ diff --git a/result/AB1095ZU.jpg b/result/AB1095ZU.jpg new file mode 100644 index 0000000..00f1396 Binary files /dev/null and b/result/AB1095ZU.jpg differ diff --git a/result/AB1398GN.jpg b/result/AB1398GN.jpg new file mode 100644 index 0000000..131b2e2 Binary files /dev/null and b/result/AB1398GN.jpg differ diff --git a/result/AB1514HA.jpg b/result/AB1514HA.jpg new file mode 100644 index 0000000..da36ad2 Binary files /dev/null and b/result/AB1514HA.jpg differ diff --git a/result/AB1653DA.jpg b/result/AB1653DA.jpg new file mode 100644 index 0000000..659fce7 Binary files /dev/null and b/result/AB1653DA.jpg differ diff --git a/result/AB167ZIZ.jpg b/result/AB167ZIZ.jpg new file mode 100644 index 0000000..c874896 Binary files /dev/null and b/result/AB167ZIZ.jpg differ diff --git a/result/AB19661N.jpg b/result/AB19661N.jpg new file mode 100644 index 0000000..def5d9b Binary files /dev/null and b/result/AB19661N.jpg differ diff --git a/result/AB8027K.jpg b/result/AB8027K.jpg new file mode 100644 index 0000000..96ee6a2 Binary files /dev/null and b/result/AB8027K.jpg differ diff --git a/result/AB8966U.jpg b/result/AB8966U.jpg new file mode 100644 index 0000000..901b877 Binary files /dev/null and b/result/AB8966U.jpg differ diff --git a/result/AD1345UB.jpg b/result/AD1345UB.jpg new file mode 100644 index 0000000..09f7d3a Binary files /dev/null and b/result/AD1345UB.jpg differ diff --git a/result/AD8819RN.jpg b/result/AD8819RN.jpg new file mode 100644 index 0000000..244b755 Binary files /dev/null and b/result/AD8819RN.jpg differ diff --git a/result/AD8907BE.jpg b/result/AD8907BE.jpg new file mode 100644 index 0000000..7c7fa39 Binary files /dev/null and b/result/AD8907BE.jpg differ diff --git a/result/AD8946BE.jpg b/result/AD8946BE.jpg new file mode 100644 index 0000000..e101b7c Binary files /dev/null and b/result/AD8946BE.jpg differ diff --git 
a/result/AD9238LU.jpg b/result/AD9238LU.jpg new file mode 100644 index 0000000..a10fe7a Binary files /dev/null and b/result/AD9238LU.jpg differ diff --git a/result/AD9312DS.jpg b/result/AD9312DS.jpg new file mode 100644 index 0000000..0993ebc Binary files /dev/null and b/result/AD9312DS.jpg differ diff --git a/result/AD9679UB.jpg b/result/AD9679UB.jpg new file mode 100644 index 0000000..2b8b962 Binary files /dev/null and b/result/AD9679UB.jpg differ diff --git a/result/AG8646UF.jpg b/result/AG8646UF.jpg new file mode 100644 index 0000000..c12b56a Binary files /dev/null and b/result/AG8646UF.jpg differ diff --git a/result/AG8T35RN.jpg b/result/AG8T35RN.jpg new file mode 100644 index 0000000..89e28cb Binary files /dev/null and b/result/AG8T35RN.jpg differ diff --git a/result/AG9402JUK.jpg b/result/AG9402JUK.jpg new file mode 100644 index 0000000..60e056f Binary files /dev/null and b/result/AG9402JUK.jpg differ diff --git a/result/AG9575PG.jpg b/result/AG9575PG.jpg new file mode 100644 index 0000000..8569c3d Binary files /dev/null and b/result/AG9575PG.jpg differ diff --git a/result/B100VV.jpg b/result/B100VV.jpg new file mode 100644 index 0000000..b0135e1 Binary files /dev/null and b/result/B100VV.jpg differ diff --git a/result/B212LOH.jpg b/result/B212LOH.jpg new file mode 100644 index 0000000..bb0a0a0 Binary files /dev/null and b/result/B212LOH.jpg differ diff --git a/result/B2412PBA.jpg b/result/B2412PBA.jpg new file mode 100644 index 0000000..dc4fbd3 Binary files /dev/null and b/result/B2412PBA.jpg differ diff --git a/result/B2417BRT.jpg b/result/B2417BRT.jpg new file mode 100644 index 0000000..90dae60 Binary files /dev/null and b/result/B2417BRT.jpg differ diff --git a/result/B242LRH.jpg b/result/B242LRH.jpg new file mode 100644 index 0000000..7edf746 Binary files /dev/null and b/result/B242LRH.jpg differ diff --git a/result/B2438SIH.jpg b/result/B2438SIH.jpg new file mode 100644 index 0000000..640aeba Binary files /dev/null and b/result/B2438SIH.jpg differ diff 
--git a/result/B2461SYN.jpg b/result/B2461SYN.jpg new file mode 100644 index 0000000..7b7d282 Binary files /dev/null and b/result/B2461SYN.jpg differ diff --git a/result/B2469SOD.jpg b/result/B2469SOD.jpg new file mode 100644 index 0000000..7dcb8b0 Binary files /dev/null and b/result/B2469SOD.jpg differ diff --git a/result/B2477TBJ.jpg b/result/B2477TBJ.jpg new file mode 100644 index 0000000..d9ca0fe Binary files /dev/null and b/result/B2477TBJ.jpg differ diff --git a/result/B2487SLY.jpg b/result/B2487SLY.jpg new file mode 100644 index 0000000..146fdaa Binary files /dev/null and b/result/B2487SLY.jpg differ diff --git a/result/B2501KOB.jpg b/result/B2501KOB.jpg new file mode 100644 index 0000000..51e48f5 Binary files /dev/null and b/result/B2501KOB.jpg differ diff --git a/result/B2514SON.jpg b/result/B2514SON.jpg new file mode 100644 index 0000000..1087585 Binary files /dev/null and b/result/B2514SON.jpg differ diff --git a/result/B2516FFX.jpg b/result/B2516FFX.jpg new file mode 100644 index 0000000..9a1ed73 Binary files /dev/null and b/result/B2516FFX.jpg differ diff --git a/result/B2519TIU.jpg b/result/B2519TIU.jpg new file mode 100644 index 0000000..3aeff2d Binary files /dev/null and b/result/B2519TIU.jpg differ diff --git a/result/B2531SKS.jpg b/result/B2531SKS.jpg new file mode 100644 index 0000000..892478b Binary files /dev/null and b/result/B2531SKS.jpg differ diff --git a/result/B2590SBA.jpg b/result/B2590SBA.jpg new file mode 100644 index 0000000..c5475bc Binary files /dev/null and b/result/B2590SBA.jpg differ diff --git a/result/B25ZL.jpg b/result/B25ZL.jpg new file mode 100644 index 0000000..73afa93 Binary files /dev/null and b/result/B25ZL.jpg differ diff --git a/result/B2602BFE.jpg b/result/B2602BFE.jpg new file mode 100644 index 0000000..b584036 Binary files /dev/null and b/result/B2602BFE.jpg differ diff --git a/result/B2603TTE.jpg b/result/B2603TTE.jpg new file mode 100644 index 0000000..5528503 Binary files /dev/null and b/result/B2603TTE.jpg 
differ diff --git a/result/B2616TOE.jpg b/result/B2616TOE.jpg new file mode 100644 index 0000000..a9e9837 Binary files /dev/null and b/result/B2616TOE.jpg differ diff --git a/result/B2635TYM.jpg b/result/B2635TYM.jpg new file mode 100644 index 0000000..5027617 Binary files /dev/null and b/result/B2635TYM.jpg differ diff --git a/result/B2641UZD.jpg b/result/B2641UZD.jpg new file mode 100644 index 0000000..d020d4a Binary files /dev/null and b/result/B2641UZD.jpg differ diff --git a/result/B2649TGZ.jpg b/result/B2649TGZ.jpg new file mode 100644 index 0000000..510b473 Binary files /dev/null and b/result/B2649TGZ.jpg differ diff --git a/result/B2658KFX.jpg b/result/B2658KFX.jpg new file mode 100644 index 0000000..70bf394 Binary files /dev/null and b/result/B2658KFX.jpg differ diff --git a/result/B2695SZL.jpg b/result/B2695SZL.jpg new file mode 100644 index 0000000..2c111c4 Binary files /dev/null and b/result/B2695SZL.jpg differ diff --git a/result/B2707UKT.jpg b/result/B2707UKT.jpg new file mode 100644 index 0000000..ace929b Binary files /dev/null and b/result/B2707UKT.jpg differ diff --git a/result/B2708SZT.jpg b/result/B2708SZT.jpg new file mode 100644 index 0000000..52a7fc5 Binary files /dev/null and b/result/B2708SZT.jpg differ diff --git a/result/B2709RR.jpg b/result/B2709RR.jpg new file mode 100644 index 0000000..73fb57a Binary files /dev/null and b/result/B2709RR.jpg differ diff --git a/result/B2741POD.jpg b/result/B2741POD.jpg new file mode 100644 index 0000000..8fc814d Binary files /dev/null and b/result/B2741POD.jpg differ diff --git a/result/B2741TZA.jpg b/result/B2741TZA.jpg new file mode 100644 index 0000000..15d211f Binary files /dev/null and b/result/B2741TZA.jpg differ diff --git a/result/B2744PKX.jpg b/result/B2744PKX.jpg new file mode 100644 index 0000000..c0c819b Binary files /dev/null and b/result/B2744PKX.jpg differ diff --git a/result/B2745SZV.jpg b/result/B2745SZV.jpg new file mode 100644 index 0000000..ccc4571 Binary files /dev/null and 
b/result/B2745SZV.jpg differ diff --git a/result/B2770TRN.jpg b/result/B2770TRN.jpg new file mode 100644 index 0000000..5bc37de Binary files /dev/null and b/result/B2770TRN.jpg differ diff --git a/result/B2772KFI.jpg b/result/B2772KFI.jpg new file mode 100644 index 0000000..d4d10da Binary files /dev/null and b/result/B2772KFI.jpg differ diff --git a/result/B2785KF1.jpg b/result/B2785KF1.jpg new file mode 100644 index 0000000..4647294 Binary files /dev/null and b/result/B2785KF1.jpg differ diff --git a/result/B2811KKB.jpg b/result/B2811KKB.jpg new file mode 100644 index 0000000..44a7fb9 Binary files /dev/null and b/result/B2811KKB.jpg differ diff --git a/result/B2815SYR.jpg b/result/B2815SYR.jpg new file mode 100644 index 0000000..2639ee7 Binary files /dev/null and b/result/B2815SYR.jpg differ diff --git a/result/B2820KKL.jpg b/result/B2820KKL.jpg new file mode 100644 index 0000000..371ee78 Binary files /dev/null and b/result/B2820KKL.jpg differ diff --git a/result/B2833BYW.jpg b/result/B2833BYW.jpg new file mode 100644 index 0000000..35d709c Binary files /dev/null and b/result/B2833BYW.jpg differ diff --git a/result/B2863UOB.jpg b/result/B2863UOB.jpg new file mode 100644 index 0000000..5beba9f Binary files /dev/null and b/result/B2863UOB.jpg differ diff --git a/result/B2866.jpg b/result/B2866.jpg new file mode 100644 index 0000000..eeb928f Binary files /dev/null and b/result/B2866.jpg differ diff --git a/result/B2868BRE.jpg b/result/B2868BRE.jpg new file mode 100644 index 0000000..a7bb991 Binary files /dev/null and b/result/B2868BRE.jpg differ diff --git a/result/B308UPZ.jpg b/result/B308UPZ.jpg new file mode 100644 index 0000000..ef53225 Binary files /dev/null and b/result/B308UPZ.jpg differ diff --git a/result/B369SRZ.jpg b/result/B369SRZ.jpg new file mode 100644 index 0000000..7663ef2 Binary files /dev/null and b/result/B369SRZ.jpg differ diff --git a/result/B501CAM.jpg b/result/B501CAM.jpg new file mode 100644 index 0000000..d6b5c67 Binary files /dev/null and 
class CharDetection:
    '''
    Faster R-CNN (ResNet-50 FPN) based single-character detector.

    Loads a fine-tuned detection checkpoint from disk (downloading it first
    if missing) and exposes `detect()` which returns boxes/scores/labels
    for character regions in a BGR image.
    '''

    def __init__(self, root_path: str, model_config: dict) -> None:
        '''
        Load model.
        @params:
        - root_path:str -> root of path model
        - model_config:dict -> config of model {filename, classes, url, file_size}
        '''
        self.model_name = f'{root_path}/{model_config["filename"]}'
        self.classes = model_config['classes']
        self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
        # BUG FIX: __check_model requires (root_path, model_config) but was
        # called with no arguments, raising TypeError on every construction.
        # Now consistent with CharRecognition.__init__.
        self.__check_model(root_path, model_config)
        self.model = self.__load_model()

    def __check_model(self, root_path: str, model_config: dict) -> None:
        '''Download the checkpoint into root_path if it is not on disk yet.'''
        if not os.path.isfile(self.model_name):
            download_and_unzip_model(
                root_dir=root_path,
                name=model_config['filename'],
                url=model_config['url'],
                file_size=model_config['file_size'],
                unzip=False
            )
        else:
            print('Load model')

    @staticmethod
    def __image_transform(image) -> torch.Tensor:
        '''Convert a PIL image to a float CHW tensor in [0, 1].'''
        return transforms.Compose([transforms.ToTensor()])(image)

    def __load_model(self) -> torch.nn.Module:
        '''Build the architecture, load weights (non-strict) and set eval mode.'''
        model = self.__fasterrcnn_resnet50_fpn()
        # strict=False tolerates missing/unexpected keys in the checkpoint,
        # matching the original loading behavior.
        model.load_state_dict(torch.load(self.model_name, map_location=self.device), False)
        model.to(self.device)
        return model.eval()

    def __fasterrcnn_resnet50_fpn(self) -> torch.nn.Module:
        '''Pretrained backbone with a box predictor sized to classes + background.'''
        model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)
        in_features = model.roi_heads.box_predictor.cls_score.in_features
        # +1 accounts for the implicit background class of Faster R-CNN.
        model.roi_heads.box_predictor = FastRCNNPredictor(in_features, len(self.classes) + 1)
        return model

    @staticmethod
    def __filter_threshold(probs: dict, threshold: float) -> dict:
        '''Keep only detections whose score is strictly above threshold.'''
        keep = (probs['scores'] > threshold).nonzero().flatten()
        final_probs = probs
        final_probs['boxes'] = final_probs['boxes'][keep]
        final_probs['scores'] = final_probs['scores'][keep]
        final_probs['labels'] = final_probs['labels'][keep]
        return final_probs

    @staticmethod
    def __original_boxes(boxes: torch.Tensor, img_size: tuple, resized: int) -> torch.Tensor:
        '''
        Map boxes predicted on a (resized x resized) image back to the
        original image size. img_size is (height, width).
        '''
        image_width, image_height = img_size[1], img_size[0]
        boxes = torch.tensor([[
            (x_min / resized) * image_width, (y_min / resized) * image_height,
            (x_max / resized) * image_width, (y_max / resized) * image_height]
            for (x_min, y_min, x_max, y_max) in boxes.cpu().numpy()])
        return boxes

    @staticmethod
    def __sort_by_boxes(probs: dict) -> dict:
        '''
        Sort detections left-to-right by box x_min.

        BUG FIX: the old implementation built the order with list.index(),
        which returns the FIRST matching position — two boxes sharing an
        x_min were both mapped to the same index, duplicating one detection
        and dropping the other. torch.argsort produces a valid permutation.
        '''
        idx = torch.argsort(probs['boxes'][:, 0])
        probs['boxes'] = probs['boxes'][idx]
        probs['scores'] = probs['scores'][idx]
        probs['labels'] = probs['labels'][idx]
        return probs

    def detect(self, image: np.ndarray, size: int = None,
               boxes_ori: bool = False, threshold: float = 0.5, sorted: bool = True) -> dict:
        '''
        @params:
        - image: numpy array of image (BGR, as read by cv2)
        - size: int of image resize (square); None keeps the original size
        - boxes_ori: bool of original boxes (map boxes back to input size)
        - threshold: float of threshold
        - sorted: bool of sorted by boxes (NOTE: shadows the builtin `sorted`;
          kept for backward compatibility with existing callers)
        @return:
            probs: dict of probs -> {
                'boxes' : [x_min, y_min, x_max, y_max],
                'scores': [float],
                'labels': [int]
            }
        '''
        im_shape = (image.shape[0], image.shape[1])
        image = cv2.resize(image, (size, size)) if size else image
        image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
        image = self.__image_transform(image)
        with torch.no_grad():
            probs = self.model([image])[0]
        probs = self.__filter_threshold(probs, threshold)
        if boxes_ori and size:
            probs['boxes'] = self.__original_boxes(probs['boxes'], im_shape, size)
        if sorted:
            probs = self.__sort_by_boxes(probs)
        return {k: v.cpu().numpy() for k, v in probs.items()}
100644 index 0000000..91c2a35 --- /dev/null +++ b/src/apps/char_recognition.py @@ -0,0 +1,143 @@ +''' +@Author : Ali Mustofa HALOTEC +@Module : Character Recognition Neural Network +@Created on : 20 Jul 2022 +''' +#!/usr/bin/env python3 +# Path: src/apps/char_recognition.py + +import os +import cv2 +import numpy as np +from PIL import Image +from src.utils.utils import download_and_unzip_model +import torch +import torch.nn as nn +from torchvision import transforms + +class _NeuralNetwork(nn.Module): + def __init__(self, num_classes): + super(_NeuralNetwork, self).__init__() + + self.conv1 = nn.Sequential( + nn.Conv2d(3, 32, 3, padding=1), + nn.ReLU(), + nn.BatchNorm2d(32), + nn.Conv2d(32, 32, 3, stride=2, padding=1), + nn.ReLU(), + nn.BatchNorm2d(32), + nn.MaxPool2d(2, 2), + nn.Dropout(0.25) + ) + + self.conv2 = nn.Sequential( + nn.Conv2d(32, 64, 3, padding=1), + nn.ReLU(), + nn.BatchNorm2d(64), + nn.Conv2d(64, 64, 3, stride=2, padding=1), + nn.ReLU(), + nn.BatchNorm2d(64), + nn.MaxPool2d(2, 2), + nn.Dropout(0.25) + ) + + self.conv3 = nn.Sequential( + nn.Conv2d(64, 128, 3, padding=1), + nn.ReLU(), + nn.BatchNorm2d(128), + nn.MaxPool2d(2, 2), + nn.Dropout(0.25) + ) + + self.fc = nn.Sequential( + nn.Linear(128, num_classes), + ) + + def forward(self, x): + x = self.conv1(x) + x = self.conv2(x) + x = self.conv3(x) + + x = x.view(x.size(0), -1) + return self.fc(x) + +class CharRecognition: + + def __init__(self, root_path:str, model_config:dict) -> None: + ''' + Load model + @params: + - model_name: str of model name + - classes: list of classes + ''' + self.model_name = f'{root_path}/{model_config["filename"]}' + self.classes = model_config['classes'] + self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu') + self.__check_model(root_path, model_config) + self.model = self.__load_model() + + def __check_model(self, root_path:str, model_config:dict) -> None: + if not os.path.isfile(self.model_name): + download_and_unzip_model( + 
root_dir = root_path, + name = model_config['filename'], + url = model_config['url'], + file_size = model_config['file_size'], + unzip = False + ) + else: print('Load model') + + def __load_model(self) -> nn.Module: + ''' + Load model from file + @return: + - model: nn.Module + ''' + model = _NeuralNetwork(len(self.classes)) + model.load_state_dict(torch.load(self.model_name, map_location=self.device)) + model.to(self.device) + return model.eval() + + @staticmethod + def __image_transform(image) -> torch.Tensor: + return transforms.Compose([ + transforms.Resize(size=(31,31)), + transforms.CenterCrop(size=31), + transforms.ToTensor(), + transforms.Grayscale(3), + transforms.Normalize(mean=(0.5,), std=(0.5,)) + ])(image) + + def recognition(self, image:np.array) -> dict: + ''' + Recognize character from image + @params: + - image: np.array + @return: + - result: dict -> {class: recognition, prob: confidence} + ''' + image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) + image = self.__image_transform(image) + if torch.device('cuda') == self.device: + image = image.view(1, 3, 31, 31).cuda() + else: + image = image.view(1, 3, 31, 31) + + with torch.no_grad(): + output = self.model(image) + + output = nn.functional.log_softmax(output, dim=1) + output = torch.exp(output) + prob, top_class = torch.topk(output, k=1, dim=1) + res_class = self.classes[top_class.cpu().numpy()[0][0]] + res_prob = round((prob.cpu().numpy()[0][0]), 2) + return { + 'text': res_class, + 'conf': res_prob + } + +if __name__ == '__main__': + char_recog = CharRecognition('./models/text_recognition.ali') + image = cv2.imread('./images/1_10043.jpg') + result = char_recog.recognition(image) + print(result) \ No newline at end of file diff --git a/src/apps/ocr.py b/src/apps/ocr.py new file mode 100644 index 0000000..389c25d --- /dev/null +++ b/src/apps/ocr.py @@ -0,0 +1,131 @@ +import cv2 +from configs.models import * +import numpy as np + +class Ocr: + def __init__(self, detection:str = None, 
class Ocr:
    '''
    End-to-end OCR pipeline: character detection (Faster R-CNN) followed by
    per-character recognition (CNN classifier).
    '''

    def __init__(self, detection: str = None, recog: str = None) -> None:
        '''
        @params:
        - detection: str -> path to the detection model file (None disables detection)
        - recog: str -> path to the recognition model file (None disables recognition)
        '''
        self.detection = detection
        self.recog = recog
        # BUG FIX: the old code called CharDetection(model_name=..., classes=...)
        # and CharRecognition(model_name=...), keyword arguments those
        # constructors do not have -> TypeError. Both take (root_path,
        # model_config); the config is taken from MODELS (star-imported from
        # configs.models) with the filename overridden by the given path.
        # NOTE(review): assumes `detection`/`recog` point at a file inside the
        # model root directory — confirm against deployment layout.
        import os
        if detection:
            from src.apps.char_detection import CharDetection
            det_config = {**MODELS['char_detection'],
                          'filename': os.path.basename(detection)}
            self.detection_model = CharDetection(
                os.path.dirname(detection) or '.', det_config)
        if recog:
            from src.apps.char_recognition import CharRecognition
            rec_config = {**MODELS['char_recognition'],
                          'filename': os.path.basename(recog)}
            self.recog_model = CharRecognition(
                os.path.dirname(recog) or '.', rec_config)

    def char_detection(self, image: np.ndarray, image_size: int = 244,
                       threshold: float = 0.5, boxes_ori: bool = True,
                       det_sorted: bool = True) -> dict:
        '''
        Detect character from image
        @params:
        - image: np.ndarray -> image to be detected
        - image_size: int -> size of image to be detected
        - threshold: float -> threshold for detection
        - boxes_ori: bool -> if True, return boxes in original image
        - det_sorted: bool -> if True, return boxes in sorted order
        @return:
        - result: {'boxes': np.array, 'scores': np.array, 'labels': np.array}
        '''
        # assert error if model is not loaded
        assert self.detection, 'Model is not loaded'
        return self.detection_model.detect(image, image_size,
                                           boxes_ori, threshold, sorted=det_sorted)

    def char_recognition(self, image: np.ndarray) -> dict:
        '''
        Read single character from image
        @params:
        - image: np.ndarray -> image to be read
        @return:
        - result: {'text': str, 'conf': float}
        '''
        # assert error if model is not loaded
        assert self.recog, 'Model is not loaded'
        return self.recog_model.recognition(image)

    def __calculate_confidence(self, result: list) -> float:
        '''Mean per-character confidence, rounded to 2 decimals.'''
        return round(sum(i['conf'] for i in result) / len(result), 2)

    def __merge_text(self, result: list) -> str:
        '''Concatenate per-character texts (renamed from the __marger_text typo).'''
        return ''.join(i['text'] for i in result)

    def visualize_result(self, image: np.ndarray, results: list) -> np.ndarray:
        '''
        Visualize result of OCR
        @params:
        - image: np.ndarray -> image to draw on (modified in place)
        - results: list -> result of OCR (output_type='advanced')
        @return:
        - image: np.ndarray -> image with result
        '''
        for box in results:
            x_min, y_min, x_max, y_max = box['box']
            cv2.rectangle(image, (x_min, y_min), (x_max, y_max), (0, 255, 0), 1)
            cv2.putText(image, box['text'], (x_min, y_min),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
        return image

    def ocr(self, image: np.ndarray, det_size: int = 244, boxes_ori: bool = True,
            det_threshold: float = 0.5, det_sorted: bool = True,
            output_type: str = 'normal') -> dict:
        '''
        Read text from image using Text Detection and Recognition
        @params:
        - image: np.ndarray -> image to be read
        - det_size: int -> size of image to be detected
        - boxes_ori: bool -> if True, return boxes in original image
        - det_threshold: float -> threshold for detection
        - det_sorted: bool -> if True, return boxes in sorted order
        - output_type: str -> 'normal' or 'advanced'
        @return:
        - result: result of detection and recognition
            - normal : {'confidence': float, 'text': str}
            - advanced : [{'text': str, 'conf': float, 'box': np.ndarray}]
        '''
        assert output_type in ['normal', 'advanced'], 'Output type is not valid'
        # Char detection (stray debug print of the raw detection removed)
        res_detection = self.char_detection(image=image, image_size=det_size,
                                            threshold=det_threshold,
                                            boxes_ori=boxes_ori, det_sorted=det_sorted)
        boxes = res_detection['boxes'].astype(int)

        # Char recognition on each detected crop
        result_recognition = list()
        for box in boxes:
            x_min, y_min, x_max, y_max = box
            image_crop = image[y_min:y_max, x_min:x_max]
            res_recognition = self.char_recognition(image_crop)
            if output_type == 'normal':
                result_recognition.append(res_recognition)
            elif output_type == 'advanced':
                result_recognition.append({
                    'text': res_recognition['text'],
                    'conf': res_recognition['conf'],
                    'box': box})

        # Output type
        if output_type == 'normal':
            confidence = self.__calculate_confidence(result_recognition)
            text = self.__merge_text(result_recognition)
            result = {'confidence': confidence, 'text': text}
        elif output_type == 'advanced':
            result = result_recognition
        return result
def download_and_unzip_model(root_dir: str, name: str,
                             url: str, file_size: int, unzip: bool = False):
    '''
    Download a model file into root_dir, optionally unzipping it.
    @params:
        root_dir(str): The root directory of model.
        name(str): The name of model.
        url(str): The url of model.
        file_size(int): The size of model in bytes (drives the progress bar).
        unzip(bool): Unzip the model or not.
    '''
    Path(root_dir).mkdir(parents=True, exist_ok=True)

    print(f'Downloading {root_dir.split("/")[-1]} model, please wait.')
    response = requests.get(url, stream=True)

    save_dir = f'{root_dir}/{name}'
    # BUG FIX: the old code both iterated tqdm(iter_content(1024)) (advancing
    # the bar by 1 per chunk) AND called progress.update(len(data)) (adding
    # the byte count), so each chunk counted 1 + len(data) against a byte
    # total. The bar is now advanced once, by bytes only.
    with open(save_dir, 'wb') as f, tqdm(
            desc='Downloading model',
            total=file_size, unit='B',
            unit_scale=True, unit_divisor=1024) as progress:
        for data in response.iter_content(1024):
            f.write(data)
            progress.update(len(data))
    print(f'Done downloading {root_dir.split("/")[-1]} model.')

    # unzip model
    if unzip:
        with ZipFile(save_dir, 'r') as zip_obj:
            zip_obj.extractall(root_dir)
        print(f'Done unzip {root_dir.split("/")[-1]} model.')
        os.remove(save_dir)


def encode_image2string(image):
    '''Encode a cv2 image to a base64 JPEG byte string.'''
    image_list = cv2.imencode('.jpg', image)[1]
    image_bytes = image_list.tobytes()
    image_encoded = base64.b64encode(image_bytes)
    return image_encoded


def decode_string2image(image_encoded):
    '''Decode a base64 JPEG byte string back to a cv2 (BGR) image.'''
    jpg_original = base64.b64decode(image_encoded)
    jpg_as_np = np.frombuffer(jpg_original, dtype=np.uint8)
    image = cv2.imdecode(jpg_as_np, flags=1)
    return image


def resize_image(image, size_percent):
    '''
    Resize an image by a percentage of its original dimensions.
    (DOC FIX: the old docstring claimed longest-edge sizing, but the code
    scales both dimensions by size_percent / 100.)
    Args:
        image(np.ndarray): The input image.
        size_percent(int): Scale factor in percent (100 = unchanged).
    Returns:
        np.ndarray: The resized image.
    '''
    width = int(image.shape[1] * size_percent / 100)
    height = int(image.shape[0] * size_percent / 100)
    dim = (width, height)

    # resize image
    resized = cv2.resize(image, dim, interpolation=cv2.INTER_AREA)
    return resized
__load_model(self) -> torch.nn.Module:\n", + " model = self.__fasterrcnn_resnet50_fpn()\n", + " model.load_state_dict(torch.load(self.model_name, map_location=self.device), False)\n", + " model.to(self.device)\n", + " return model.eval()\n", + "\n", + " def __fasterrcnn_resnet50_fpn(self)-> torch.nn.Module:\n", + " model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)\n", + " in_features = model.roi_heads.box_predictor.cls_score.in_features\n", + " model.roi_heads.box_predictor = FastRCNNPredictor(in_features, len(self.classes)+1)\n", + " return model\n", + "\n", + " @staticmethod\n", + " def __filter_threshold(probs:dict, threshold:float) -> dict:\n", + " num_filtered = (probs['scores']>threshold).float()\n", + " keep = (num_filtered == torch.tensor(1)).nonzero().flatten()\n", + " final_probs = probs\n", + " final_probs['boxes'] = final_probs['boxes'][keep]\n", + " final_probs['scores'] = final_probs['scores'][keep]\n", + " final_probs['labels'] = final_probs['labels'][keep]\n", + " return final_probs\n", + "\n", + " @staticmethod\n", + " def __original_boxes(boxes:torch.Tensor, img_size:tuple,resized:int) -> torch.Tensor:\n", + " image_width, image_height = img_size[1], img_size[0]\n", + " boxes = torch.tensor([[\n", + " (x_min/resized)*image_width, (y_min/resized)*image_height, \\\n", + " (x_max/resized)*image_width, (y_max/resized)*image_height] \\\n", + " for (x_min, y_min, x_max, y_max) in boxes.cpu().numpy()])\n", + " return boxes\n", + " \n", + " @staticmethod\n", + " def __sort_by_boxes(probs:dict) -> dict:\n", + " x_min_list = [i[0] for i in probs['boxes']]\n", + " idx = [x_min_list.index(x) for x in sorted(x_min_list)]\n", + " probs['boxes'] = probs['boxes'][idx]\n", + " probs['scores'] = probs['scores'][idx]\n", + " probs['labels'] = probs['labels'][idx]\n", + " return probs \n", + "\n", + " def detect(self, image:np.array, size:int = None, \n", + " boxes_ori:bool = False, threshold:float = 0.5, sorted:bool = True) -> dict:\n", + 
" '''\n", + " Args:\n", + " - images : Numpy.Array ->\n", + " '''\n", + " im_shape = (image.shape[1], image.shape[0])\n", + " image = cv2.resize(image, (size,size)) if size else image\n", + " image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))\n", + " image = self.__image_transform(image)\n", + " with torch.no_grad():\n", + " probs = self.model([image])[0]\n", + " probs = self.__filter_threshold(probs, threshold)\n", + " if boxes_ori and size:\n", + " probs['boxes'] = self.__original_boxes(probs['boxes'],im_shape, size)\n", + " if sorted:\n", + " probs = self.__sort_by_boxes(probs)\n", + " return {k: v.cpu().numpy() for k, v in probs.items()}\n", + "\n", + "\n", + "\n", + "\n", + "char_detection = CharDetection('./models/text_detection.ali', ['text'])" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'boxes': array([[11.86109069, 50.18588 , 17.78047777, 76.3388877 ],\n", + " [22.61613852, 50.53432627, 27.35313309, 77.12569721],\n", + " [23.92049417, 50.51673333, 35.74157014, 78.21390984],\n", + " [26.93010896, 51.20184805, 32.03692624, 78.96562426],\n", + " [31.03131979, 50.9152495 , 35.84480542, 79.37770781],\n", + " [32.11097877, 53.13152939, 34.70335989, 76.93529992],\n", + " [34.21225804, 51.55257194, 37.35505132, 78.26963218],\n", + " [35.52049355, 50.2777568 , 40.2434603 , 77.74667902],\n", + " [49.1127779 , 50.9270992 , 54.50939891, 79.81615429],\n", + " [54.97524605, 54.84159779, 59.88662901, 79.10287951]]),\n", + " 'labels': array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1]),\n", + " 'scores': array([0.9523825 , 0.9936494 , 0.22991937, 0.9894872 , 0.9672788 ,\n", + " 0.05240851, 0.06529855, 0.98821217, 0.9909609 , 0.7661257 ],\n", + " dtype=float32)}" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "image = cv2.imread('./images/2.jpg')\n", + "char_result = char_detection.detect(image, size=244, boxes_ori=True, 
threshold=0.01)\n", + "char_result" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]\n" + ] + }, + { + "data": { + "text/plain": [ + "array([[11.86109069, 50.18588 , 17.78047777, 76.3388877 ],\n", + " [22.61613852, 50.53432627, 27.35313309, 77.12569721],\n", + " [23.92049417, 50.51673333, 35.74157014, 78.21390984],\n", + " [26.93010896, 51.20184805, 32.03692624, 78.96562426],\n", + " [31.03131979, 50.9152495 , 35.84480542, 79.37770781],\n", + " [32.11097877, 53.13152939, 34.70335989, 76.93529992],\n", + " [34.21225804, 51.55257194, 37.35505132, 78.26963218],\n", + " [35.52049355, 50.2777568 , 40.2434603 , 77.74667902],\n", + " [49.1127779 , 50.9270992 , 54.50939891, 79.81615429],\n", + " [54.97524605, 54.84159779, 59.88662901, 79.10287951]])" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "x_min_list = [i[0] for i in char_result['boxes']]\n", + "idx = [x_min_list.index(x) for x in sorted(x_min_list)]\n", + "print(idx)\n", + "char_result['boxes'][idx]" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "FasterRCNN(\n", + " (transform): GeneralizedRCNNTransform(\n", + " Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n", + " Resize(min_size=(800,), max_size=1333, mode='bilinear')\n", + " )\n", + " (backbone): BackboneWithFPN(\n", + " (body): IntermediateLayerGetter(\n", + " (conv1): Conv2d(3, 64, kernel_size=(7, 7), stride=(2, 2), padding=(3, 3), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (maxpool): MaxPool2d(kernel_size=3, stride=2, padding=1, dilation=1, ceil_mode=False)\n", + " (layer1): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " 
(bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(256, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (1): FrozenBatchNorm2d(256, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(256, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(64, eps=0.0)\n", + " (conv3): Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(256, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (layer2): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(256, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): 
Conv2d(256, 512, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): FrozenBatchNorm2d(512, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (3): Bottleneck(\n", + " (conv1): Conv2d(512, 128, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv2): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(128, eps=0.0)\n", + " (conv3): Conv2d(128, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(512, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (layer3): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, 
eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(512, 1024, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): FrozenBatchNorm2d(1024, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (3): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (4): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), 
stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (5): Bottleneck(\n", + " (conv1): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(256, eps=0.0)\n", + " (conv3): Conv2d(256, 1024, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(1024, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " (layer4): Sequential(\n", + " (0): Bottleneck(\n", + " (conv1): Conv2d(1024, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(2048, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " (downsample): Sequential(\n", + " (0): Conv2d(1024, 2048, kernel_size=(1, 1), stride=(2, 2), bias=False)\n", + " (1): FrozenBatchNorm2d(2048, eps=0.0)\n", + " )\n", + " )\n", + " (1): Bottleneck(\n", + " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False)\n", + " (bn2): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(2048, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " (2): Bottleneck(\n", + " (conv1): Conv2d(2048, 512, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn1): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv2): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), 
bias=False)\n", + " (bn2): FrozenBatchNorm2d(512, eps=0.0)\n", + " (conv3): Conv2d(512, 2048, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", + " (bn3): FrozenBatchNorm2d(2048, eps=0.0)\n", + " (relu): ReLU(inplace=True)\n", + " )\n", + " )\n", + " )\n", + " (fpn): FeaturePyramidNetwork(\n", + " (inner_blocks): ModuleList(\n", + " (0): Conv2d(256, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " (1): Conv2d(512, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " (2): Conv2d(1024, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " (3): Conv2d(2048, 256, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " (layer_blocks): ModuleList(\n", + " (0): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (1): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (2): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (3): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " )\n", + " (extra_blocks): LastLevelMaxPool()\n", + " )\n", + " )\n", + " (rpn): RegionProposalNetwork(\n", + " (anchor_generator): AnchorGenerator()\n", + " (head): RPNHead(\n", + " (conv): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (cls_logits): Conv2d(256, 3, kernel_size=(1, 1), stride=(1, 1))\n", + " (bbox_pred): Conv2d(256, 12, kernel_size=(1, 1), stride=(1, 1))\n", + " )\n", + " )\n", + " (roi_heads): RoIHeads(\n", + " (box_roi_pool): MultiScaleRoIAlign(featmap_names=['0', '1', '2', '3'], output_size=(7, 7), sampling_ratio=2)\n", + " (box_head): TwoMLPHead(\n", + " (fc6): Linear(in_features=12544, out_features=1024, bias=True)\n", + " (fc7): Linear(in_features=1024, out_features=1024, bias=True)\n", + " )\n", + " (box_predictor): FastRCNNPredictor(\n", + " (cls_score): Linear(in_features=1024, out_features=2, bias=True)\n", + " (bbox_pred): Linear(in_features=1024, out_features=8, bias=True)\n", + " )\n", + " )\n", + ")" + ] + }, + "execution_count": 5, + "metadata": {}, + 
"output_type": "execute_result" + } + ], + "source": [ + "model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)\n", + "# get number of input features for the classifier\n", + "in_features = model.roi_heads.box_predictor.cls_score.in_features\n", + "# replace the pre-trained head with a new one\n", + "model.roi_heads.box_predictor = FastRCNNPredictor(in_features, 2)\n", + "model.load_state_dict(torch.load('./models/text_detection.ali', map_location=device), False)\n", + "model.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "transforms_img = transforms.Compose([\n", + " # transforms.Resize(size=(244,244)),\n", + " transforms.ToTensor()\n", + "\n", + " ])\n", + "\n", + "def filter_probs(probs, iou_thresh=0.3):\n", + " num_filtered = (probs['scores']>iou_thresh).float()\n", + " keep = (num_filtered == torch.tensor(1)).nonzero().flatten()\n", + " final_probs = probs\n", + " final_probs['boxes'] = final_probs['boxes'][keep]\n", + " final_probs['scores'] = final_probs['scores'][keep]\n", + " final_probs['labels'] = final_probs['labels'][keep]\n", + " return final_probs" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [], + "source": [ + "def original_boxes(boxes, img_size,resized):\n", + " image_width, image_height = img_size[1], img_size[0]\n", + " boxes = torch.tensor([[\n", + " (x_min/resized)*image_width, (y_min/resized)*image_height, \\\n", + " (x_max/resized)*image_width, (y_max/resized)*image_height] \\\n", + " for (x_min, y_min, x_max, y_max) in boxes.cpu().numpy()])\n", + " return boxes" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'boxes': tensor([[ 40.8441, 27.9818, 49.3989, 42.7060],\n", + " [ 88.6962, 28.1993, 98.4423, 44.1957],\n", + " [ 48.6350, 28.3514, 57.8577, 43.7248],\n", + " [ 64.1490, 27.8397, 72.6785, 43.0498],\n", + " [ 
56.0416, 28.1927, 64.7346, 43.9529],\n", + " [ 21.4208, 27.7889, 32.1110, 42.2703],\n", + " [ 99.2837, 30.3668, 108.1535, 43.8008]], dtype=torch.float64),\n", + " 'labels': tensor([1, 1, 1, 1, 1, 1, 1]),\n", + " 'scores': tensor([0.9936, 0.9910, 0.9895, 0.9882, 0.9673, 0.9524, 0.7661])}" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "resize = 244\n", + "output_ori = True\n", + "image_ori = cv2.imread('./images/2.jpg')\n", + "image = cv2.resize(image_ori, (resize,resize)) if resize else image_ori\n", + "image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))\n", + "image_t = transforms_img(image)\n", + "model.eval()\n", + "with torch.no_grad():\n", + " probs = model([image_t])[0]\n", + "probs = filter_probs(probs, iou_thresh=0.4)\n", + "if resize and output_ori:\n", + " probs['boxes'] = original_boxes(probs['boxes'], image_ori.shape, resize)\n", + "probs" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "40.84407105993051 27.981817026607324 49.39894185300734 42.70596457309411\n", + "88.69621082993805 28.199302860947903 98.44234729204021 44.19572179825579\n", + "48.634972900640776 28.3514365211862 57.85773246014705 43.72476715338034\n", + "64.14895104580238 27.839749633288775 72.67848799658604 43.0498140053671\n", + "56.04163723304624 28.192741456578986 64.73464859509077 43.9529456466925\n", + "21.420775726193284 27.788875704906026 32.11101209921915 42.27029318887679\n", + "99.28365332181338 30.366835140791096 108.15346433295578 43.8007679923636\n" + ] + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAUEAAAC8CAYAAADmdRxCAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9W8y1XXcWdI17vX9pS/dQsNImNZEzEiEheMAJbjCo3WBNdyClBfyNBqOJiVRPOO2RyoFR/5ZCSUtpIxRqrRuCaQiJIS3GuCsaQlBLGxqM7BLh/541hwdzbK4x5rzXWs/zvJuf9J3f97xrrfuemzHnHPMa1xxz3vMWVcXH8DF8DB/DL9VwfGgBPoaP4WP4GD5k+AiCH8PH8DH8kg4fQfBj+Bg+hl/S4SMIfgwfw8fwSzp8BMGP4WP4GH5Jh48g+DF8DB/DL+nwKhAUkd8mIv+7iPwVEfnutyXUx/AxfAwfw/sK8tJ9giJyAfB/APitAH4OwE8D+HZV/d/enngfw8fwMXwM7za8eUXa3wTgr6jqXwUAEfkTAL4RwCkIfuEXfL5+6Zd8EaAKx15FBWH+LZAlD6V/+atfEdmk0ZbmNKRcHNYcSUK/ySJZJpGOZJJyZ6nNvHNSoLfHaRtt0smNX7v6zqbSGmcry/wn6iOz7f137YZVMKF/BNkGCkCHQqGzHXUjQenz1hbCsgkOF6R+FEHWNqyxQld1ShJymV519Vp7NbMWa6elvZbYRW22erkt4CR+Hxb38tM+sDYxvDylS48FFmafaDtkpV2kSH/jb/7tv6mqX/moBBxeA4K/BsD/Tb9/DsA/2SOJyKcBfBoAvuSLfzl+37d/A56enjDGwFDF0KnsU8EUQwcUOhVkakwA2xgDAKA6oApKB+hUsUw3f0HV0ynGMPV0DRDgOCSu8V+pg65q6oPrOA4IBDIUGDMfl1P8z2Ty/3qY5c1yj4vgeONeCvU2XOHLZDyOI+M0Te/lg8ofswFxtX7gPMcY0R+qg+6pDV7gIhPojuOIv09dLpDjwOU4IDIB6PBhLlLKOC4XyOWAyIHjuACqeBoDYyg++8kn+OTpiuv1OnVFZ9/1fgmZLFy8zIvgchG8OS74vDdvcAggl5T7CKRUk/MIUIIIBAdELlAA14l3eLpO2Z6expRtDHzyyWynp6u3YdbxqgNQxWHaeRyz/E+9ueDzPu+C4xB86lNvICJ4IwcuImQ8p0zedwAwrHwAkGPeUwNT1mccMx/v39AFERzHUfQ7+7mSCO/3s/EQ7S+YgwNkIMaAktzc93NcCA65QOSYGOBjpSn40AEdVK4ApurLWFUA/+H3/pn/cyvkA+E1ILgzXktrqepnAHwGAL7mH/3V+qVf+qW4Xp8wxhXXobheTZHGkwHIdQIj2X9vjCHTQl2vs6On0ebB78OeWZg1ngrkkMlCxbiH0OCEKVWafbJwNhIMQhjUjmN+XscVQ6+lBTa8a69QwXYU1wHo9eptF00adfW8m5JxvqUMVXQQVtgAAIqS+7/R3tYPWWZmO6A4IJF+jIEnERz2+xDBaHJ6CwqAqw7IOOYVmYPzyQbeJ59ccb0OXMfA9XolOdemK/0tEuTAjd/T9WpgnAP8cFYoauX7QPQbB4BZ7oj2mnm60QgW6O1HjJWZq/ery34dA09PPqBn3wwz3q5zBwSX45LzBTMCCRims6S7UIUcpuPWd97/3nmuJ7oxgNGa0QapEw7vztgTdHOUOuhyfkFGRGIIqQIiwwhKthj4e9S76tyRSJ2y4PXhNSD4cwC+hn5/NYCfv5XgOA58wRd8Aa7XJ6iOqejXaQ0+uX6CoVPphw2GYQNyiA/UyRTngNYEhQKC/IkAgbDTsmk4ZwDc6sSODP+IRyXjvMg0T9cC28kcOwMCcpCIynINOjDG7GJnYUVUEAiLYIyxAM2WzQYIAlAEC+/lxwzDmTlgALKIEkpYmEUMot0UeIIRRCaNMGYAA5qrzkH
JepGAU8ZzaxHqN+trHYpxGKOAAENwyKzPnGmgTYV5SjzhT00uhWCoDeSRo7nYGqRexWUz2sL3dOA6FKqTxR0wY9HaCReP70YmQZAbgcs8BAGCpfdJrxUAjAFex8CVQQuuW/GDcwkZvQ3JbO5ZY5nBzHomlKbxmmNsbAlCAdJhQxVI07xTzGeG14DgTwP4tSLyjwH46wC+DcDvuJXgcrngS7/kiw3kBsZ1KvzQCYKqOi2/GgvQORCuT8OuTYAc15l2Tp8BxFTXlaVZZge0wVT6ap0kAYwKA8KZ5YxnVgxwEKx+HHXaMS+Ve+H7oWlSAUNRYmfznhxGRCBzoDRmGVPjk84P1uT1LkZAA3hHs7wb9Su8ZiorJkCo36cBpIBiBNvIKafdd/cBUv5gMibT1dwc13CVELhUcsuNTQOJZRJjtGPeP2z6LjS+PQttzEIMOGeLWVvad8XUMTPKp0wkaFzOOrwa0fbXgUHG1dmlBO9KBqzmKoC4n5OYmrMwERw6GWLIpcnk2cBPklFD5mU6aLrDusb6FVlGPwlJ5W0qoX8eL8WRkoH4mDWFW4w73KikewdC9XtheDEIquqTiPx+AP8Npt36flX9X2+luVwu+BVf8eWueVOZzJ9SQVDxNK54uj5hXAc++9l57+n6yfQNmUIMVQNKxdOTxfnkKabY6UN0pmIMQa+4XjWd1F4nABMRDTICtHxgxxCm6ZBZMHF2YcC18dHNEVAvLd4+AXy+JnoU0Fw8iqzs2S/Fz+rCsp8061pFSwH8Wir7HH+poNMdIbMs60/RWecDOQTSYM8vFxwxKIxQwQF3RD8pgcVax43QEJnxp0xuYBROpq823g4xf673FyYjj7q4XNFOR35634ZuRAVqYIYZ+jBzGXB9VIyrtYsmkMPabgxnPyPayQnddMFkA7hvb9alstIAQRGI+4/tOoMZqPyZabIunqr3NDMq9Sc3QnyatGHT7d4h4d+E67ob7oPGDxt1GwUHBLNLPiwThKr+JICffDS+A386oA3VdUBxmUBjjt3jOp3t43I1BR24XE1Rx1TAYYxx6MDTJ9PRejkuBqzD4pEj+DowFBjjwNWBhn2C0JqmTaUP8QmdK5BMdDDLdITnVqKezvJcO1R2nIvi9SnI0snVmls/RNxTJzYBYVzrcWJAxpyjWHxnk6p+wX1YNeT0LlXe23BAIQac044kyLFvsvopE5SK5I6xabcKKCmlHJgAMcekGtNAyAdjIdpAJIa8tOmvNtBAlbfkktiZ152xqSa5tvInoI/ZNs44bTo+xWdPrbstbNrt/lxk3VQVOA7IGFZn1vkalNq2VTbTxMyHAT53BMw6kBFoLib/vSzmicypuuT48TK5toWdvh4DXweCzw2qinG94vLmMhvgcuA4ANXDVmljyM3psrG5p182F02uWv0G7tcYY+CTTz6BjoGnp4FhbHLYVov5G7Ygo3i6XvFkiw8h15hT7qeneU/HgC1PwFv6OOZqZoCgKq5XWwg4LjlNg1nsYI2+0oYV5Dy+5kpqD0VZfHrEirkBuNM+sE8mpAIUX55aRF8o4lk5M4xISyAM5Mq5j7dgInC2Nhe5snRnW1mGAwRYNo+T2EJsMm8kzinS6zinxXIg/JK56h4QZPhk/r/SpqYHQvIGc2TIdIBzWZyJOa/WaIPFYBkQixn4rCGsTIl0YrMPZ+UDtnIzqrfOigZ0GOvq24Y2hlGzHiLkIir94+1ayUR8d/Kw8SuKLZX7roLIVxVqK9jhlzR5/FMoT/GOfmV4ryAITOouI71rTs3TLrlFsWnVIbNhMKdb6qYSs9GOMaBjKvgYA8cxMK6TVao7gC8XAyxji9cDl+uRA0gV13ENug0AQ1gBTR7b+oE25ZyARJ1t2TiojeFDbQ9W6ZHas8AEQXazVxaoRhPWzTSRGQ2MamHDNyekUyGSlSukk7vsQwZio5xHUrXCtioIef6ChAxruZiKktxdEkZIL7/lf6gz1c7cLKYBkaqXYDrZSi4
JXMYGibnCajWhpNzMoBz8HsvYq+N97eW727GQNzIInUlxjTzC1AOPLGtbRvqq6oZK6yxDUhPDJxhpcmGvg2fZMlMMpa5AC42FoNeE9wqC16cr/s7f/ls43hxzzg+ZwAbFVSfvCqDBakV2iwEHAByCy+d9yiPBpxFzSgFcx9y64r4TBrDrde5LvI7rZJ1PV1yf5naXp6enZHCR++woX8V++uQppi5mlpHTqpnQ/ZwyspNLDWIcjawTaofzVLew4q6nNI2A5qLOMnbp0wdAtfC79paWMtsmitFkX2oGLTFRFxmEiklTmEA5Smz/ptiD/XbSHMZ2MlEyLp4m0TZymLZxB/snzEMSmANQZoPUeEfyp6V1Ez1tn+DKoqoUDoYy91oC0dgHySmtoWMhxoTIfbCCI0DS0ooUNaCmShATYpce72ysYjJBkbq/NXzGlwvSfaTBGCeD7NPn3Dv4mvBeQXDowN//+/8f5CILCA5TdwdB/+Q/BsiYAtq9y8Wm2HIYh0QAXWwKJT+a3396yoWYoXMl+nq94jqusdjiznVPG5t4jYEmGEkp5wr3HWpa70WTjS3qBG+PUjYXBAhaO45pActm0oiMGBgzLRe3Yz9spZMd8CrrWkDjHLq/k4Vtyg2WCbizK0BQUVaGg9WUaXEynaU4kjUZI6cV86s5g+W6ETeNf8hfy5VdgnY3GcWtg3f9SlQbAvHd3T2bpsdihgZHglvpaSagkm1SBIhuN81j+ZoBLknb+Fx8eNxfSvtNGwgClfx4fB/jPrYvl0me0v3iwLiI9qzw3n2Cn/3sP8BxSSboHDxAUAz8DgM0WYHR2Zm4dZP5dMBMdxgQZhi29aJ2vlPs2flvLhcoDlxkYFwu0z/45jL1KB7jQoDqfJJh4M2n3ti0GzOuMVD3SQ7MFe9c1R6RF+CKbT5QjEocfKCm6Z1MQ+ZGWj2OKGsHVkdstUmF8uk/sAGsNjoKG6A+NO9euiY2A2QnUS2LwIbSTPaXU+ET3nU/lJFB0OB1ifIdIur0kdPVryxRgpTANvM7lsX3ZhoIcKRfDNEbq6IYCvcXOnU1trSVVUK/q4/NM2Ug7UbTf1f2NdXH9JLICfv2RHL709b9I0eQFzEWeXnzJvpFqIyDQPC4+PifC5/HMYHxjqbdDe8XBMeYILiZ8rIfyTulg583RjA7A0GB4Hq5zNXby7E4gAd8imDXnEHCt0tM8AUwN/s4kxufStloGu3TZtWBT65PsWKN2LYzF2KGPw1znY9Zqf8hn9iIB+TIuueWHpryjoniURcDn6spxGzf2t5sxXMfpZU3C6LI1XoLXSuKHHIS7zgor4ZaWzBcBvmcRPoAyz2MJNyS641wRg02wHie25651RTNT3VotBtkBZQqirezbO7btU1b5qyJn+SRXRGNpaXcps5hGmIGFTKnTDvXlG/zceBzECyAeetxTqnpjuPApz71qSQxSP2cQHcpOHAd84mzy+WCN29eD2HvfWEEyEG+VVWePs3NUrPzyYKr+sZRhEWCLf/L9QCOaRkPSFnZPAwwRWQ+YwkCDu8sA53pJ3R6RzLZv3NVW3DBMR8Vg869aTqAYStcONJqHgId9qfAsMWeCoLUCApbrMEEQHvMKpzg5sg6xmSQql4XmipZnWZbDaiaz0dnPYuDG7aOGewFCzBmB0n8lbK8UzeDN0qJkbvt+Cgzm7wzlCyDMYZZhF8Qak+lpFELL0iA7u2PbUIev8lbDIYBjJCcHt+Z2A5oy7ST2jzq3co8IKYKR5QHGJhcDjJqUu/ZDvGor6YeTznm87yTDHB7kB4QmA0bH/7M9SFHsDS/BnoeO33UCNLioHY5LjguB968edOm1XM8HEclQwBwjLnYeBwH3hyX1xLB9w+CU1/cFCUjS+U0e6fep7lxdFh89WceNZkRtE5sROgRLfc7MJD6FMFhyK2pIvxFzCAFxDJFbEuP4LjMJhxXf1rliosxQX9C5c0bA8kxoNeZ69XQ13Y5WF2NYZkSxMK
MO8hCQRHT2qEjHy8cOQ1nNZ+Pnx0WZ7btVX3SST2zjHQpbZrXdMbVfCDfINE7EZ3LpfwbjW3T6djGQjL1GXcSep7Ktbw1Da43b31QL9Azv+omH2wO3iQXgd+MOG54BWXBwEl9ADDVt8pPltuLkAlmooJDHepnuxcQdL+MZXC5XHAcl3mAQ2zCp9VugbHAdD1lg7GMB95c5uLL1d1R5tKqbO0y63YcVp6PMwnCcZFLTHMvby4TzN5ckoiUZs4ZnYvkZw/EvX/YQLD7o8JBHdZC2wCayhr0mxc6kEqOAK75PZblCWgLm4SzF43BqTD2oAHFJuSk6GElIVA91g5DWluhuIfptOoBPWa+qjbMAwQdAJ055KkfbhSatTDAtjaUNAIivJ9SMVegaQuHTjYYKWzE9geQav10/drus1uePV5lS8cSGhRT31S6RgW7/SwsMQdbz3kCIG/zkVaFZCpepZ7X+qtV38X1iMKYz3WodTXJVwDw+vvPI6e1av3pnsDJlsxgNBA8jgOXizMv8dIK6ZgA63v3zIXSXCsznwmCPGY938vhfrtLzLR84dKDj79DDhwy83tz5MlDwaxbIwcIhp8Q1QjtaPYzwnsHQV49df9eLrWDBn1S9e43LKu8jQUIgaOXwYDIgOtHEc086bNMFwQqIweNbWIdAWw56MWYqR/15SEI3KGRJu7FoLNYThegjl22R85kPZyxiDEbiadjxnEJxsmlD3MflA3kqsGmfUuC7SiKBnBiOCidb7dQM0zpb8rB1VcSb/1OBpyNkX5biwOZTxIA8NEpzoysgZ0VkvhWp+ltHAEORxTFbR+uENRpbe9DviDtZhohY12ekfcfEp4Moua/k6ZZdAldCJGgufigCeEu4+Wg54nNTSTWNuGrO2QCTQic7X7YNNZBDGZgAroFCZTI04UO5Kbni0+HD1+0owaxflY/OUfnWDlggKaK8eQPOIwcT6YDSSJ8AdIXF3XRrZeEDwaCQ+eud15hmso7cmBafG/8vnAAIBSHf/f0sDi9wQSYvrXoKBv8DaRcsVUVeq15XC2Bn/8WnUXp66aEqaR9elWmQjaKgyf4CBSJrRA5kuYA80eN5oKRlHzHEIyDQJAWH9go+TPAvj9uuDi0+p1M2BcF4uIKFC3s+6L2JQOaG65gccrcrj77ndNjIkNAAZK1rbMsZu75b4auZ5426hBymFz+XGsIZe2GPLjAnQj+JFKf2nM75QqslnaeU8IJhNOJZvEvvChSFxpnH6fszszkIN+eg7C1nAMlYG4iTcN9XAgExfYr9u2vrpvRN74zY373x1XH9VqIkYiDZ31G2olSvfay8P5BEKSOrZNmBAlLEMNLa3pWUfeN1fsncXxwcaMRa8qpNZfnDA2kvg52CSYXYpw8wePumS4bi1E6jqZnZs19sBf4VA22JrHKm75FgMYbCCAuxwQQVTtb0RkrwAPC28nL9r6am8MvFtce6B9K4KIkgy6gs/vNlryosLjj3CmvAUcMXjM6R57laA1H00sEqfZnvQdNPcUGuAPgYf4wb9HII/pkAk3oqDMlQiOJDrA82HABiPNzxHrVP+Ers0eAJoN7nfC4uyQfH5tsTxJQnCn6+oTleRgbjCpK9rXATtcRtc8U33tnzgQGtVC20hgSh0GIXILBuS448PnDC94Hen3ClQgOg1pp69Y3/unb0V4b3u8Wmfa3Zw3M6mase0B/8/aCay32nambgJ4jbdNxVc3z2MzSFpDvomyY7DZOA8j4pKdEEq+DMkJiUCWT8BrPb0dLy3LV3nBwGk05/dy3OJbMgDGMVasjL0xwCX4CUJTlLNGnQUkJZ5xDS/mH1KcUqj8tvxxm9Oa6gUmhiE22IvNMyCSBmmBCrCzAYWO4xdNJ5qE6wp3g/t15YnX6vpTuARLP1HbGyW05i5hl+UJf1KXMOUY1DM52vd+t7yZQqv1JuBhcQiFdDzeLVVTVG01xNT/JESA4ZZz7Y2sfHzhwiMCf3t+6uSgITZt
mfUYwy7cRPtgWmWAArcNnYMqeVoGnU7dDKkPyDPX/I49icbBvVN7Os+0sv2b5nQIgxUX/HhHyC3GavC5iD85TVBrwaT0rI7K7UX9ni32wp+rnMVk+fTrEtxvZYo3bBB3zaAJFDJjeds5+uYw5R7LV5TEHTbIqe+onmJRN0XWCxgRBkpuyFvoS2K61eZ2R+ermxY9zirpn23oR/jqBZcrKbSxiA9YYEPcFsz00YynMaHPxLnROvJ+yBfmPqF22PVV6GS3KZ9DU63E8vudIIFh0tmRq/ncfbea/Vm8HP9ShlEvItoyFWo5SPWJ+8gCheDTcBUER+X4AXwfgF1X119m1rwDwIwC+FsBfA/Atqvr/3i+uOjO9YYqviDfzeqq3UNEoMbDLrNKxrvL6/Z0PawFNr8sYMeUKRV4zrZ9A7mWjqUlMiwAnLjltIbWH5CosA5mQhnv2sR9xajb8mQMRscUOYl1+ksKkgvP6xfdrWkZDl0GysL+op0Z+GltehrE65IJMHHHFj1V5z4lbkmQw3m5dP6L9HFxyNuHAtWN0pByUl21Dof7w6EdsBjQWdBHbP8otbrKIPfIFAezYuDzZWww4aNdCsMUJJHmWYxkZ1hcI0In7NMYg02dcnu/1dvB2tP66Pj25RKGTYcBdXl9swTQo4doIYVJnu/EPXT4Z0t3vy9ZCJVbusm5vARseefz4jwL4be3adwP4c6r6awH8Oft9PzRj0n8XjND87PGThufv/R8pAl3rtHv3u+at5dp5/QwoN8LwidXxaRX0e7LJ64xabp74Xa57eiJZiC1B9t2nVoVd2ECf3zV/099xVBDpv7fXDrpmn4fF802x8UfpDzlwkbmN4rBVTl8R9c3AveyQQQ6Kt/5Juzdly72gx1HrcbRPcbCV1n5A1q/cJxD275qdRSaOrrHS8LUcIMG2Gaz4s7O5rv/8RJM/3rmkiQR0veXdjeLmd79W4uJWHJzm/Zpwlwmq6p8Xka9tl78RwG+x7z8A4KcA/IHHi+UhOVVj+BYQRn5FPd6py8ZcebmXrIUXPJQjSPqzwg8VUxnNAwrC9LuCm/omjYuMy3tKzmpNcvS6dQbKiTXQHD7xyFzjqBCvHBfsA9VYBTBZGCoT6G4HhcapIsxMo1gzRkrN5H636mfMqVqWIcQKpQgiTLeC7TrjnVs45sKM7STwo8qiSRoYkp754s1kaLOsSzyTulpkMf9TvGdFsn1l1PZyH3bso9P16LQwkMA8TJj6tCKoyzLyvnq8mf/B/XEyFkIfA2RWuyqwo+NI9wK4j2MyPy7fHpMcijg1yA919XTevgzKWTbNlKifs2XntxyrPs0eLUZN8dLwUp/gr1bVXwAAVf0FEflVjyVjZ+pG/DBubt14+FgOUqc3OeCWogpYbJmcWZYyf6S4Ec2VmS7bkIx+ruXe7xgaS6ehDsqT63Alp+8cp+GxA1Ut35z6AVQexxTY/HKpsFLjiZ16TIheB38FRBHYU0Bt31qAjHOkBKswPbb1ZMiwN8DNTcNuvFwwWfbd8ZMQtVH4taXgdlMAsCcpApdJKfnTxXcA7FXX+iOO71r62OPkRhr37+UTObqxd9k/3QSzfazjYIWR1C1/GkNxdH0OUNLw9wFKb7+b6djYVyJiK81CDUfyrLL4d2VorO3zivDOF0aE3jv85V/yRf0eBG0gbECrE6vd1JXznLk8CESbPB4NtxZStuWdMMTTPCh+IcmOSQ9lVr+wdSXJgrmA7wfDoSSkt1NEktEAR1s6B74sX3AczEkSVAvDRPZj9+UdjkwKY/z2XU2mYL+02gwsx6L179kGJ/1YGt0BUZcozKYDn5xtZcEdR/Mbr5xHa/httXqf+J4XaSqVUGr6WAgqN7NfBdOlMGcog6NA6d3AimkENU5wkrCdnk+eDqTRdJn4fCydGXT3Rd5lE3fCS0Hwb4jIVxkL/CoAv3gWUfm9w1/1q5q0xKfSZGyY2AqElvf2WrC2TZo+7Qnb3KYut8DsJXc
4z0fAkFTrMbC7XbBrqeXaQcoGFbcBDVBHv1tiu28t2z/L4HSusskeJdJnZt5WDLDM6jD9i3E6hsTA7nmKNLf3AXt1Yz2ktlatDywpLHVe2gOgM5oc/2TM1U/MPlFOv+psiQGzWT2JTG9rB7sSCPka+8v8JBOm75NSc5utvnTEq0KT908ZSk0CRGeHLQtTRTYySlrboKD5K8JLQfDHAfxuAN9jn3/m0YS+PcG/S7NmtXMcNKpyYxMn8nfllpUNhlJSm2dnSWvPNiymBoBdl9MvkoyplrXvnKIQuqhnGX7nQzNzkeQI5Ts8Bo+3+C5UhzpYV3mB8FMh43I/luSN2pQacF+78vO1PpUvMmsAY4CsASDNg9MfFS2ySFHBi8oPYKF4ha2UrHoFK2jmFhWha4j27/9GeZJp58p9ZUHLCIhqpP6uWk+kQLPUqFL0WbkKVeRhHEo+8p0kBsg+TrIudSyke5IBXEn2blS0yfl60OvhkS0yP4y5CPIrReTnAPxBTPD7URH5vQD+LwDf/HiRuSfq6AB4UkHljr4T0kdVlcfuVl0WHiQMgm0Q2hd/x/EckJ5f7lgv8mlXxv2NA6wskVNO8WrKUgNpaZa4hKzF2BYgTMvOn4WhSYydNqQbCgry2eJN5QvYbBhgt+shshuaaJNkfpucS3ppYkabST6WF6mbzFttc7CIegri8Z2Qt0hwImPe4wNRNQyM74c88XkrQ/z5uJgYZm+ZOxDvnc506zgJrFHbLD/sdes8DqWmn4+7zmPbYrVcABXNl0618etAOKh+hfm1CsfiVAn3MeFeeGR1+NtPbv0zLynQQeoWoC1T1gJsDwBhdEwFHAeRYH2nWfEUwMEhZeFPStJk2H9fZWXZPKMpdz/ZiuUoANjZXolPJ0pjXeDpcYFs//DxdWkDCFarfSt0l8OjoaeYoCjljuNjJ5FVLgdeJy4rT+xEr3wNNkV38kSJYNZr5yXgz9iy7zuWfNuUWupX9Ji6piaxNCI2HT8rl6H0RMebiFEnHpfZHCEv2d0qn1SJ17FFJn6j32+LE36wJ0a2VuFOGg/n40g3CtXuz8ww2cTcBHIrSfcdrXKuA8nTlVK7n9M++f2wfj1UwKff5QQS38pU9CcAACAASURBVKRM6ZoPDk2Zdu19z7VQNxJrx5FTf+zZ79h6sXmXBPsh+fNWGbXVGyOluM4yFt8gpXwIlhlstmxk3pzvVWbp6rs0crsOSB2p3lK3sVSm3nXInrEvyH9Sm42h9OmttEZQuK4DYixQROIorXshATB1Jw3EHVKw6ftUb1mA822E9wyCnR+tQBj26GywGYUOfTybLoSe1ZYPpzSVVSQR4GxQ7WsiLW2P6Cph9n+nr5uB5SpfFejE06ibH6FMrkRaGF4X8bYNkiz7NF6Wt/4+B8ez/Xl7AKwiiWTr+4Ar+REL4dl0BdCmH+pGdqUeypltwmzHjVGM9m5Ig9Y+weiUo6DMbCInWa7lLKLS2MJkQ9baX4KarJTd+otDroJvGHufQ2jMw0jaQq0900wUrB3vLHyAk6Un94oTR+x631bhDeTsR4HyiFl1qDcF2c74aD8R0fcA3jY4ClyXzKzTGvJ1n1nmIVxy1Nn9zW1RNICv5pExFra1LCy5M13SmlP+OTXO8ey/B9LL5I+p+QLMI9uIFnu1VsB8RoiKT1CuDMANgEjVhpKx1O1VHdiWrR+NfRWZrBFyexXpmrS6R6Vk6fNuTOIQ10V/VoCNDNpjb24OtWkRkCefVw3otRbbv5dsVbt+iB3PWgwA6/gA5JhP5y1AakxP/cmilXFzn8587USd0EPJccgA6NGpnVcTc18v74UPc7x+sxvLCq/962A24q3Q1hCuiM2nc94cDIDUGZRA7NnNJWXRSakAu7FO64puTGDjHm99CCunIEXhQc/tNH/nJgtZTbCaDO4LojrEULDsq0Pa84bnvBioW2FH2PcRkxUAOdVZtoScsFaYnOW1bpuO6MtGeT3/DXlGW9zSwFn
7vjepHon70kW0c2Gie7pepDQMdr08bX+1/JgOl7r1/P3ppuxbN2qMs8Prmw2RbS1HGme3JeQaYlbpZ6kugfpWuvVXBkIUw11r4t9ylAC7Xn5e+CA+wcLcYqCamvjWfO1J8kDKeC11GTQoxj6ADivLqPsS7R82YOIgUv9I3KDp5Rqjjd0IhXML7CdqBJBZAhvQrRQAPIGgdrI8pqjN+pNSehnMqorcbNkjRgKBK//eUPW01bCcx84x4AAIbx9FeV1jsrP6dI6qv0tlI4tmRB8yfkrlHGB1VhCM6JgGdjIaP0LNctENFIqzp9WgD3hvEkzZM6BbkFgMH52pqdm+uknhOnQC1TFWuoyxT+84cPEZQumXltugR0yJNZc4xC6dBmvri653VabeFlGVzdh4O+H9gyCvNraWVruPaEBTypHHVDmAuILcb5gKhP69OpT99A4ATaHnYKPDJGP0zo+44wO4WznkYM0z+dYBxXnvnjcNZtPtw1LcRnsDNRNOg/toYyrOhBjkCXBKtsWa1+1HW95To8c02NtkGeH0xae+YUwUcaBmvMOa6sQS5JAy9qNumPxAWrtrp7GI+hsjSReWgZkAWBh+MzDBehwI1I6UjzaRmqf/S9MD5fttvAQQlratur4DKg6Hn+QMxHPYQ2tdpwGfJwBeLpdomZB1YYV5DFtGtn7iVWpUWc8AcG6laWqhOGX8zwkfgAmunVIho0fX1OkOmJRbWDuenpRCSBFBiqOs6P5v37dH5TUZRo6qVFipqZhNBiB6jtyrNg/oU8kKKlqTlSE+/4lB7THU9201i2yD1HXPTcuwNCru71kHU7AyIQm0x6BfvV6WQck7kSMacplWM4h5Yw/Y605xN0R3EUNhdu3CagoUx4+tgtDGd/JxlliSPlpvMD9sYBc/JgWhO9ks5RMOQFSvki514B4I7s7CXMCTphXz1RjM9pYqFFk83ezWULbTULU00wmIraaFPs/owfDeQTDekSuIl7uU+8y6CLQASRMaZMw3MCMPG9W6QjiVIFlO4pOxP8tzuLqUQzsc2MRupT/FWd118EkgWk2yn4wRL4hJXaoLeJN+pHLnlBli03/zgeVRWM4aqfF2imEyucP7EC4hGho5+iROJ2FjEuyV8hbkxt5kLzsmqPNRNdDA0cwvB7ovgFm/y4AeLiexLfqk3q3tiaxzNpdEHwC+OEWdMh158WKfGb0alFqOeqVCtj1SIabYouqvRKTDBzgOSn8F64WGesWYaYsQDG45jjaGhK2FzJed8Tu+KcO10mp5C+xVnpPd5UazG8fdu7HheXdmuynKzYHWWIQLGwx+dvgAx+u7Epv1Rvdl6KbT6OuyeBFwsTQIQ2iMcWUAIg1FsoT5vaw3Fybof0MRLy6KmyyEHRUSPkVNS6YGruIDjXHLP8XzPqLD1SogNx4454MQurN5WCNsld0biBYHphwEBsymNc8i5Pbvz4pGuuZKUCQT5LyZBQx/29NmitcBild0Z7zZTgqHwuwD18HIIPpe+SvGDXoZ9uOgC7q2UxUq4/irCyJGOdreelsr8JdWLs3S25DTkdDMongaOxOcErRia718VfgxaKXngoggdUozp1lHbfnV0pa7SyfvgfMl4YO+aMkZ4SNpgATAZheQVp+YkQjWZloLu/UEQydWw1jLfPGQv+h85EAOEHRhagaq6iQg5eM+VjcSHUxp84opVX3oPAdMyTAGohpb2wAnyeB5XqkKat/zfRlqFt22OYWjX2Lf3mjbdjLwi6qybfrQCygQrqdE+/hnf8kOP0ZWt9f0xzNrXbLxJxUbGITXO/dNGlaulper6vXK4ezv8AD1kbO8kE3NgHg9w4ACfha4+5wF8wVHKzgxYCIMdtUn1yUaQW0c5DgNyMuTqS1Tn0ZTEyA8pGGQSbIYo81aRYyzIKex3gYQvv/pMBshaHHANvc2gGq0ynU45ki0UY3jKtgsSXCC280X5IBwzMHO39nrPj72+ckCvtryJEXC2okTKJX
qZO/6AJ/b1/cLVuufAjOnekRdKI7QQLBpYsRRG9wcXSSeg+6HYtT6tVckcr5wFmm/xYAwWJz3ia+2Z145Be6bRlatOt3y41MBZy+dXZGCSZuL+Yk1afdoVTT8grsyZzZHYd9a8ijszrVoa+Q9RmWzlUBO9uckMIhF65OsvV/ycxu5ucivWppj0eqSdx2bK4zvAqPEI/GfEz7MFhk4gFGoRnKJ4yyn+vusqxwYOP9Is2kqzU7Tds3LqtO0KcUw5jeGluPHHRjZwc6EkGXq6pJ1zTq5D7EeL5X75hZHfmuj0OfhsCu7Itugk2pE/J4gFh64FhLP7pqvCoohyYZ8mlxsuFrduhBNtmSCSNay+AQNAAuICsSOyYopZdIPbqRdU2RWNlCZWccCXEw5sJGbNlyPrKcgjUMWmGb4gG1UFwkfteumWh96+2VhewD0fxe9DoVvU1W3NUU2yoOMuxzuWnHDg2iT+abFDp1dmom89cmUtwNkrwkf4OXr9B3+3i2JztAWz/WcGU1aLbfQm6bXdgjDDnhgy+66KZd+81YZB8BYGLnObQP+/mFXMhNrAz63WJKxzBUmKrhiHYNOYti/xu10/1nt3eEGam2rBIKgaV0t56D7zsjmO8FTjlhNvxNiL5qV5cXHS+NpcOWzx7Nlom2UpTopYxNY3gTCBNwwuEeNPw3A0dp+hv6K0JqO2sy2qBxGNQcdrrqRdLlXXEbSvjTm6pYq2osslrb+naR8L0ueOEQyUTulnapunMc04d2H978wEriVnZVGurKkYDya8WKwEwCutK6DQJRULI/ENQYPxGcCotI1Xf+8PCCZaqFAvc5r16e8Ctacwv4aCyy5NMUtbbhMn88GVQs5C01bHqAmwRJ4StTDsAzYx8UDxYthmbINZWGCYWy2Q0iyEba/Hw/VgMbFOhPxt7w1PduB4JYj8TS7xfe13XXRqOeKlSUCtq1nM73i4gT5pTCBFQQ5F792yNzuc4QxoOHYARHNWJUxvwkbYrOp5lsJj5wn+DUA/hiAfwSzbz6jqn9IXvzazTWkLw0IsGqg5P3ln9F3nokpZETGpoE3lnivZBJl1L8RLND/rsOnZVLAiqc7s+hmFU/bwgAjxkcDQGchC6gh0vQ67RRtd82b0BVWeVyQ8EKDxxkyh+IX9LbQa96Hbflwfm6yhH+Y06JwuxjXgtzK4un8UbX0Myv8hfM7n9wjJ+v0wICQRtI2XcuxZUved1dUBs/+W6W4zw2xkEY6MPtAScaQJocJg/qYGXRDuptZKGV6ORR6yVdwqkgfZi1IGMY7lVoGyzvAPwCPMcEnAP+Oqv4PIvLFAP6SiPxZAN+J+drN7xGR78Z87eYz3jg3wxwI9AA12PJyPDNWPj3z38CKLFo7CiVOpYwMKMlEaw8kA+TvxAA1GVJhgmaFmcFNeVMRvfzCBLkqJ0xwAUK23lyn1i7+ZSEIzmhcmQ2M/bFtgzzA/Uk9vcdhA0TU3R8j42WLUgdqz94D8V1cLiZP1E/lX1OWLmRUt7HjSmHWUMbt7lu0arkuS1s0aVgEZtnPCS3rBFkCNovDLFCsjwu7LeSjg14FxIOm/lFHrp+2YUcV3/oCpUTZsr53AYSPHKr6CwD8zXJ/V0R+FsCvwatfu9kD07jKhbVEMf7g+79a4+b3phkFDBngfCPzOng8ufv/xlDbGwhchwNibmgVITbjKA2E9gUX1A0IxmfdlBErwini+TSh3GgKSd8UKfMSxxuVZlI5lQUOux8nFNPhpgoNWatRSMPlC81qMs4yRmF01Zj4rsNqIP2d8HwtH2DjGu0BpfH0rhab+A/8VhTfaYmn3qyyHBTjLUhE+GbYHoXGrSDE9zoQImddsV1rw/aWoGrPvZsuADh04DBdqlNmoXqsLUVzAK5Vi7URYS/Zq8OzfIIi8rUAfgOAv4gXvHZz8QvsY9i9ld5VMMxB0ZUmOzItXC5WtGeGtVk/0+aCn2Ht5vehDoDERgI7TU2ICQIoLhdeAKr
ybpqlAN9eDaqsXsY6knxLAtclg8SHRvydCjMAepK63WGtW4WcHCh53Zn1tnKS3LAwDu+rJW+uE62On44iBsuTNsbKTLKJqd6bevh2ljAQpZFT97bbVVD7ko9C86Kzn1jHSfYTW7DzEzOg9RVjN9Khjaqxfav4N5F18qbn8haNiHqv9X1XwMfhYRAUkS8C8CcB/Nuq+nce9V0IvXLzy9orNy3GoxLgkSbZ+TB299fv858ENiCOwFI3hKvPhDHb3zkb2Fd2t9N0fzN1uFMrPHeKVP2sJipPeyhnSxBzpTl1lbRFmsZGvV6aBmXLHtLOgNmib6Rmvqs8UqidnJEmV8q6+f0OgFzjPkm414K7Nivl3QieavfQ2JK+6dCRNb4rV/les6vlpV+ixBPrtzL1ZCa3GR+8J7FuuU/wGjrfAXJVtQMoxHYGzJILEHeQe756v9XwEAiKyKcwAfCHVPVP2eWHXrup9MrNr/6qr+y2tH3eEuJ+lEeUdZdGFfQ+2mQlPr0bdj/u0dSZp+bzGvJb61yfBLDXa1+te87ltxMWpmcDbFp3l6STFkU9EvskY4rCLCWnZZRDA7OcnjIIWvuz7Dsj5znIZKf1JUZcwv1wa2Ep8uEB/UB8VS0vO3K5yqLaWTpmips24/yCYUZ6v7dsHV8Y4F6AJHnCfWljY4j5j2En0gxAAwXJySP5igXilJb/h0HCR1aHBcAfBvCzqvof0K0XvXaT29ldBuGcjfnV/caYSjFzicGJHRPMZyx9PHmcEZ3vpwtnOoVvncF80F1hPkFaFPH8aEK4qNFOr4Q6nyMSeKTldNQ4yQgU33OMuLIMUgeSkJWAZV3cSbRycVjuSN9F66SdQM88BpGDy6twbK0rw3DmQcB7a6x6WuXB2uFCm3iuH6QbHnYLBTONUPeccc4HjTIzJAe6fnSZ901M7dWuVZYcUrge2T/unvEu7Xtj1ZS5Lxb51FryQulwnxnkDGn+o5dLRov+zTHmsrkBSGk327lO200WVvuS8AgT/M0AfheA/1lE/ke79u/jVa/dnKGshAWFd/i5l84/z5Q2X5EpAYS5490XO54C4OwJkKF5uoemz1BVcS0gWGHMn/Pfid7wAKGV0uuCtLiMHD1TzZz4Wpl6YJ6JV4YnARYzX88ygSqB1/UzqqbbKjZZYtRAWqPEuAh0Zb+gfRIAUosBmqeMl7FaykK0LbBOT88etAhcKfZGy3duTOHyisYepcbMz1LbCUwhmKfBSIKgrhvmWReYy0n70lWGjR3b0tCxFqdJlqXFPwpg1EM6hubxYPSEjh4H+Fni1DX1nUtmsOwxvp313AQt91/PHh9ZHf4LN0p60Ws3Z8b8Jfe+neDITq6eEWV94mRHAhovDjDQxckeBhgKxPHryRLrlgClvG/JG2zXR4NZzciDGNzdVujbPxjg7kzLitX2eGXwm/oTg15h7EYorGOHmtWALJbf6Mqy4LXIT89TByNINKilkHBaflksepxskanmsdv0exK1lBvawki7yeqWPtW9qC1R00cHcSUKSK7CIpvFbhXi/OhRREgyTQWkMcEzX3EZsTZ+qguAe0y6kNwIgRra+vMl4YM9O3wWAh9uzXki5u7qybSU42geguCnwfinDoW9ZTCs8RhqLIT8gTJRxx8/YnbYy+LvKx7UKaHI+njVSSVQFFZXneFNzH3lMQH9DKgyv0POfZR3F0Xsc5vejQzVPQqWHHKFwTaAZ+PSH/A/lTPYUN14vl0V9SDY33O9cL0L/RBORvnUwT0N7uRRy/PFJ/Lf8x9SFZMQaLo51OVYRFt3U+zzlviMly9pPt0loji0vK/C4k999MUTf5WGv9JgpiU9XeyXgV8AKNf05eG9PzaX32kQR8OXn/a9VVK2X8OGnDVJASP/o2mx2nPBOkCnP5sSDQINmurVFbj7q9H+u8div9MhCj32b+xaar69tqY5e2yuw3bxB/J14anfvk77QECGOTgyzbzD7bE4/1tYWJqp0F1A4Pj
Gjm5uPO+1ODMAhuJuSAIE2cl4L8RYPy9/Nx6ynaiYwi5pU3qItH/iiHvp8cUJ2/qu9VFUH9fTcNWj0KKeCjpxO4quuqfRXRYn2zSQI3TndVPiD3CeIHvTFPV4e35A6gGMX1Fwva9WJg2geRrMZHzTEE8mOK/Z8VicraINVrshGgNb7UGwtcINfPtgY4On5sMa3SD0NKEZ9O9sy6Lgvh1Fs8UpAdLv05hJC+Oq0GO3kr1nwGJl85RnfmdjZ7KLZD7259donSAGVZHR+8HrIOsgdnbCbex5MCAUVWqgys3tT82IZru7sRzaWkIkGB5nJiVTklQRBw1H+4bAYUpSJ10IBxCqoy8yhWvH5VGGx2yjM/KbeWp5JSvK52FCqFcjxpmqxswmFz6tZpra520ittixqH0geerM2wof4Hh9rUpMwVS/3NqoysrELObWmIttcaFB5mMyFkIUuNpzwA6GmTyp/5K9TyflkUn4HgBjKFGFh1Q/yVIvdYmoHXQTl0GIpRPY1Mu+QwrAbH1RmlPidcW5DSoh9gdX81GYim+TmD/nZxwJT6N5twWlVIauXUk2//T36fZknl9lFG06dhJSbhg4zHyHkiEUN+jrKyTKEC6yaXQlMyiiQ5FmeNWLYWl1ANBPU3LmvJMnII4mFZV0z/7wZ7RzP6f9Iwq1A2mHDjPmTHK0xA/4swHpK+4SVa6yZ1tSC94ecg+FD3qeIH8p/opW+TqtA7zmu+nTMt1U6kgGP/+OekgqD+pcyEhFXNq8PApyq76b3qLO7jnf9YmqDxqeQqwCFPZEDHARZTMN7gX6NHJ7t6TNqZUabVa6Duimf3XJp8u0G7w7OXKqewKgqEBhER5og31YpnBgDreXueuoy7Rl/vtS10ejN3nrGHP7l0gQNu+D5YmQMvvYBSFGOPXpEO9rbuM5ZrzYbhJjxiJ1DIcoSnHfQ/icWRjxBhxNeXdx5oGVCVT8yef6pYJXtpVAOOa2FzoVZmU2abHOBoj4nOvBUDtdAlyW6d7NdF4XMtUncbsPbUbgHzXNY0CwyuJM7jjyCeEc2LSTUmFH2HN5/ia+Peh1mY5gBGvIgaU55s/iuDWkaw/VXzW23yxMG2yIB/xQXp+yrh7f6rYAsi13fR4bjQtwNSPq44FBMORoYwdnLYkazxY7xHTnEF/bUByW5+F7AX024BwCgnl6KAMlFiA88+9udf+GzM8JH+S9w3NefzJ9xf3KafvOdmaJY4wpgJAmaIvqnkwFb4Uz5WGG0VNMWcv8BenzOCv/RAkapXgbm0cfDS9jTs4GUKZmd0joafnRvw+mKVFUl6bS9lnKK/cIZAHSp12/UBf7LcpM9jVY21aQrotbrK25QLQUHLlvEu6ySlad8qxjqPNeL/fsOWAmezz9rmP5XJeVLcArwwdgggckXqPILCUb8e77Yzt9FttWYv4KBzP37QX19jJp6n2rqNyyYcAlpIDWP7xFpufWwTrytRf6RF3UB1Gqwb57pXx4hYpqClv4xgbjp8YrPGtghmBQfaJnO//gaq2JATngiC0UEBvJwXKUvEI3xNqM9P4QwYEDuGFMHx4kG31aXAvSvhD7V/Z3eh4RyVmebdi/3bOZvrPTku8+hAZuGPTOb8vMt6cVkn3KrfSCoNR7d+D5gyuh2v6mdN1J3ukHc1FZrsVCmf33tmfJH2g67JWbv8q2AjxWR95W7YMrchd/a0NCxFaJTgtiK+O8fleNBLOzKLevVStZcKoWdPJbF9XKsboC4f0hND/Tj7MC25lvrV+LhZfSMr7im5KyX3NZQGCKkCKlA50vvjDsW0SMpVY51ripI8mIdjJZeldz9H5pPdkKKsyRx8xJ0BxYWQlnADf6s7tWcvxwP1B8mPtK8kBer//0tTsONsUujKBohxXRDescZx0AX+K/3YX3u09Qp4+C5/91OqT+qt4I9wZhKF+xmHbuX7FwVQ5VYoZaF0XGqI7e2Bx6q9GlqbWVfXvKqCvjwO1hXfeH7eWqK4s
959YQqG3Y89n5Fn0q0tngcRxLuv0MX8q3ZYEHwZ9IvncXmHHGkA/3Ceq0WarsEacZNP7X8zzr16195fLu4PxN08bj5wF7sQAh90uxuMn+uIDUJdsrOHQ5Y9GzCv3Z3D1fGXj74QO8aIkAiwaVGhU6Z0ObvOxf7iNmfHmM9yZHi9y3hvTfN30vFPIIBfstq7O7FK8aA11Zjr20a3lbcKnyBBRuwLL4iWSVc/f0QgWqlHQf9yzdTtLOaXsMl7FefxtDJfo9hSXQq9uiSn8S2+JTgVi6s/JOV9hbWY8C4KmJ3aQ9c2+cCVLGaul6IeNFCbmzlB+fk1WmlZiWm0le3y0cvmcQVFyNCfrR3nbZ3w6ZrhYxYGGTMVGrEHJXQT/+Kei4ahs0ZHboj1eXfQrnHc/ToIWpRPzMlx/bUiDe4eCrzp0P8IJNDR20NFGvTxFPgk9jfOAy23H5Mx5PfSgPKo63u3BG0QaazxovsmwuZtul4RteaECQlDHVHejecvyYYS8pWtBkX0WpRtTL9d8aMlkcnmpyLjxQWUc1/vGKTzUu7Z5xE1Il2+IB0Cy9UkA9i+11rix/k3+onS4Nq6r58vVNPmp1iQ3W5AsIc+9TXXX9qmUI5+nMc2cNXxne+3T46TradMlp84wTyh2bcyV2c6R/zwa1TzmBudOeOnw2nKSqFcCyRzKOA8dxxBQ9/VjVJruvQkTwvT/8Y++qeZbwu7/1GxCD1AD1MIV0MXcA05n29/6JP7XEeRvh07/zm/zxT2v0/esmAQTgfd8PPXTi2gcNv+dbvx5AtU8OiFy3OKuwYdSM5wCfYBfGwjIXKyOBg4DRnhz5vh95t+31e37nN8AroVThmCgw+0Pjupqn9PgWROYYgBEBqtfMyo8wAnxOPWgzft6bY/2wM2p4PdFDpRUvCx9kOsyO8IVmA1FTMh5xz/CALEpVzljtWo1XlYN5Y2F12kDaO/fdUvJTOTcW2HnS7XRalfgdhM/80LsB1w8dFEgdBBpdy8HehzZQQVJbGqiSQW16zf7WPlN6r+Fcr3Z3eIzWOU5uls5ZUCLprHsmLDpN41uojFletklt+ZeHRw5V/XwAfx7AL7P4/7mq/kF5xSs3y2JImw7eHtrbbtjmqaq5oi+AHfoy4wwJK8SAfByHxUlw7EzQw7/+u795Ac+5XSOtf3+WdIz0/0W8GGUZ/sgP/+lSp6gp+Q390aVbwa25h3/1274Jx8VkPoj1CqA64kXync15HT/zg++Wlfwb3/lNIficBGQN+yEWAuA/IVb5b37HN9UpKXw6VtPVBbCRgxGCP/qjPxH1zTwy+DDu1yJuA7QeypS1pOs6nK/n9PDp3/EvQY5ev7W84tMGKUAr9Pt/6Me3MnrcZUL0YMgxZkAItX4xKiFOQFaZzvOc/XUcbQvV41ncDI8wwX8A4J9W1b8n85j9vyAi/xWAb8IzX7kZ09h6McKcBtiUYePM5zTsx1mshN1nJ3RMnAOEsuw+PZfN0xQ9FACUej1kaXWpbqO0js9Z6s8FlXOmwKuZnLVIyn0cFbzHSIMA5IBPWdfw+7/rWwqb2T16mGXPAfCf/rE/OdP+3m+Zzw8D+EPf+ycATN+eG5scu1IHc1xvwdM0IJQSRej5ZJ5m3A7MSJSSWO9RHnXzdFylBCt70SU+ljio7hySqcRvbpBM61nc12kuOFkcdoLfDVMPePtTekLP5G8iR9sV3XTi8jxxTsMjh6oqgL9nPz9lf4qXvHJTgaenkT8gwQTD/9eo8d72caOYEln6ZdoMwBcs5qZWe9DdjpDR8TQt7/VKAzjLmsCdZba22V6LgdbiOAAlSDRH/c0eXVuiK1EQO1LYOiDEzgYUHHJkPAFEbOMxFAPXmf+eqNYCnXHAOLNIGDKv/zYLVag7gluWyb891zzbcd8aJ+JJns1YGPoYkOMArlfLG8VaXGmAebn+giIn7+fQNaUrM5OoM6pfy3W2wequlmlw13GROjrbVOg1sAyA5yC4Whj
vgxJUYrbj9RG3DubbHzbl0nHgoMNlcrzDpjF1ISTHWGRd6idaHzd8myvG+ZxDMQAAIABJREFUj75o6QLgLwH4xwH8x6r6F0Xk2a/cVABPNuWKC8jGZrDgOLND0iItwON/m0GV712lXfdjYL5veAAjwW+u4locmlZ4rv0JjF1H1OkIB9483JpA2++W7jmBpzELGwMIBNkRLVAZc7UPgDv175tapb+ZT7A4B3uSo04dNXfYtpo6AHY++PzQF+ByIU1sMUwx5eiDzuVOx//61EYHz87UdoTMW6uojhmNAMXOvvz7KZAJ4jErB8A2FM5A8Oa0+BXhX/tdvz0qe5glnWNmyugs0zExWWzNhxdD5+GrtY1nzq8LD4Ggql4B/HoR+TIAPyYiv+7RAoReuflFX/SF5hez1tEKLF1hAR/UFusAgLpw4bORaCyyOv1pCX9lpqrakfk6X2ijSv66Um8DKY1pWsjaZFY7Rqn4nUo7eCoCesXqAN8MnP7tOaH4i8Btn6xiGgT2U2q0606mzNtePjXmRgh32Xj/5NYgZ2Kc1hCwtTdgQP3M+uoYiyulG50da0eNcjOkzRA6EPTx56d37J1lU8rzXACi+lpvsqHYAd673m9XQ7qc1JfAD8N6VWgcs2WveB0aj3wW20uE1t/rHfWJKK+r17NWh1X1b4nITwH4bXjBKzd/1a/8Cr0+JfPikbZjgeE3synzEe+j6VatPSeMqRKHXBBKo/PMwD7Y52DM9DSTDEu/VXH2CaKeWegO957OF1pmOYdZ79Fi7kp7OQBWp/mwR5kEooJDD6p/to2/ea/ltuY/5oGrT09XHMeB47gAoFcqko+Kj/oHYE/l1Dy766DX4xbYxCIH9cscaJWx72YRp6ADJ2eC+M/jbnxULO9LQs+nskWTdhG1T4998WGV5RYIfue3f8OcKUQd+9NC8S0MWvJ0Dfm+74cnsxxDcRyC4X0Q83LEgbS+JxKquIqN2eMoFaeZN8KPu22Bl4fzlzJ4ASJfaQwQIvIFAP5ZAH8Z+cpN4OFXbmq+zc2sQQLH+qdaB0CeCO2Hn/pf5qGeXjXujaG4Uh7s+2P2F98Lgt1u4kX5N3mwD6V8RvbS/tZb6SvdS7VOEk7k3cmeI8by6gB0klejztoaNADtRJKevsiEVHxm1cwse7oSv8hViq0S3SCD3QcnOY7tt5Rrka597kBRueDodjXQNgNBmQr4WN+pXOpGPP6qDH3Br9djGxxsuX+UZCJ5XNmz3VsdNcc5v962zJbKOFyxoMZf4/DrMF4aHmGCXwXgB8wveAD4UVX9CRH57/HMV26qzs3SAQ7eiAq4E0PMYc8dOO8r9Ik7f7LEVMRcfp93UmnaMXWzHNTGZRlz3jKR55DDWEDG8w3Wc1uNph62suK0FoUdh57nBwL2vK3sB/fywiUCwrOTdqzG1pbzvDe+q0NR3oETZNRcA9A4kSfY1ZRmLew6Qf/iU5gxMNzfaE345jhwBXBcr7ULYr9sXh3X+f0KhR51gOVbANNFUWpGQFl9yqxDmK9UGIAOwdmLmS7Wykr/JlaFctCsQekqYloO2PuskSxXQX7GNl33fVyu/7WTrQT1MeCviUX89gUvbwPxWVBY0OmC2L3I6403RTGMzrzYMLtR8rGrQN/oDDol3MaHiOTmcss7RBNfEBV7ogxb0jsKqGujDC8Pj6wO/08AfsPm+v+DF7xyk18qzRY7/EHH7NCDRm/1K+WUk7epuN4xhjEIMvCUiXSQEckLSPCyiYHFrjLxlM/vOzgXf5PyZx28vgUk86J7ZyabRlyPUaY/W1bYLHZhpzngd2UtOWnbuK29bgBk13pUbs8vDJ8un/x9qbeXXwBwEank6Z0iqO3G8hcPVeSvlNriFqU6rWIAa3rwWpuTlesun5ojgVDIfwTR9bZfhDrpUPEBFPhNYL8Q5749hRUcJI8ZMLsrvuB2asBzg79/FvmAGCMe572A4NsM7BvirSKs4MMZnmqeHtwcwVdjBqK
A7eyA7+1jZiF0EoiXH/dCKGw7hQhcxN9NwXq46R8Ky39uCF7bqZUFrfK6j2aMYZtPNadOwYxoFHMDtJCMwBTykFPgfo7fjA1MlEPttE3fAXCTJ+DsyV0q15txlQwG+9l2C2S72ojI0mw3JCyRZApCMl3tt0FzziHT+stk0GJjhkx2bFi+vVWmCG9EUGKT8s5IcV13gcmKpxfbnJ7DO41puhcYgFN2LkugecbhK8IHeWyufxa/j/fzVYP+1lVj7neNx4DRx4DdP1M7L8vXIXtTJoGbytBXpioDfE1og/vMTGKjeDw/Dqkj8q4ouF1Wm/7GbEcFIn78uU9zQCRoza8YNJbigXa5gYEuKsXtbouV4u2wOtjhks8KquuCHFq/VCC/V8d1lrCpIzH/pd+b6s4VfO/zI+MMMqLULodNPjW0+HwsAN00E4tfxt/thaalDvAzBxOA5wuTNDDdd4JyOfHUP4F3N45QLfV/afggh6pWalsH7q3q7JR2t5o4LwD7E1p6nKT4VBKKwki/b+l8QFj5zG5ZXgftGY+yRP3e696BoAeF4ij+GJJbUHxTHj8Ucyhw6FwNPMQ2a5k6BoPWaJv9lhW3PKTgbcCchS1A+OAmZ8Y0ju7h3T8JE1W3D9VVH6JfNgAIWXsfAbonjOc2pkRcaTr9kN00o7u4BO364TMeqWMJzuq5YpEQBDibIhfg935N15S36zkjr1qt1CnelPNgCGermqzXUoQ+oQIgOI1Lp0BfEHpJeO8guLMiIvQ4U4u/s7ynwFdLutvp09qtrCLuu4L5tTKNHRA9TsdBX3ChG0VR5xnYJ3lQXvxZguRpJgsLamFkhnNfFhRykRiZcrFFnqvxQWFF3oNwZwyPTrf2dbFFIluIciI6F0bAYxl9X7wOADEdJ6CyGsS2pRMGucimJzKGrNwku7mE3bnbFkS348pupdUNqQT4zcWTaozdLXBaqiLcRMutYhgm6MjhtknSvaT0VNRZG2n2QbSns7tgwEqgli/nChEWolPH82sZoIcPdorMNrDjpcW9RcO3+T3QPgrEptetj+MBqx1+jjM5kCzk7hRqI/bOfdDrEA9eyCaH1k6P+uWKUHfi7url1/r+QA4nsIHYmhLTvJZON9dg3EO9vTf9t5lpzAnDbhZxn82uiewbYzD2ahQcW/NKLb71s2ZO7hZiQPQFOa5K8EAlGU5UsPil23RFfFw+qz0AX/QrZRR2fELnUds+ZCr9/nYAEHjfCyOYz+j6ZtoFyHR9KPpRB+xD5e+AZHONLc95XgluzFa5XruFlGVB6EF5d2UBuaFbsG6pmZzotrHYtW9xYj/g+/K/Xb9yOZVpSP2kvPh398s5kPWWG7CpPdIwsD71ti4O/zYd7r7WrSviAaN2M+j6lae0x1LDlHv+YZ6GhK53uT0pMg/wuO0/2wJhFhyGYRd/J+fiw5uR69OSYSmC41OeEjL7ZvU14evD+2WCmgrp4dGpUweWWytVlnNMh3pekd7vBuOwrQtB12cMV65ajndSnY68ncWSKi9PrUM1NuDlltZl8fhn+a4sz5WuZAzcGDhc/lb2jaxxHwVryrT6LNzKb5e5mg5s092akcwK2E+JLPmaD06Qrux9tzfE1BqH5b2nSkz897MhRz+LuDR4y28jq89yACyLg/eDjZ/GCKdoqXOuYnXTUPvWB8BbDO99OjzUbTbSQWrBjNZpKEwKJ4PBO01r3qD4vJqWDZ8dUKYRPL04GeiFvSCnps7M/FnYbkULm4wys05jpBxpQFKvHQxFBAN1O7MC4WWJa1LvQ+eTNO4sdB2PSzGAIsW2/lmf3r6ehSwj+mY/U979TMYMnfXmf9QLiw4Q9bghTWOCOxlV41CFAMIWkRno6l/e5TnvKPpq+MqSSVIcIvaC9bweW1O8bNcbxXaj/THn1dF3rjv5iFvKBox+Y1OZNKhKle72NWux0bONgbg3rX9JeP8+Qf5XK3t6xMYwYJxESMUkkL3FRmpuNf1LWtuVLl0
r+136UysaCDbZWIm0WfX60pq5F2wppV+Itp51m05uf5qhlptOodv1PfNbMhjuaM1u2sVsa2mLm1LYVFAS+us27QS/ZYqdlHH+6vrY4ocxBjUNDfA1//XaCsV0Vc81rxtcjzfPYlynqmLG2AGwlpnhoOnrjJMmBQricg5+KW9ZMefAVSq6qwa4xGY7ukVTMBLm/oAHjvx8OLxfEJRq0Z4zdXyJD2bnD0sF1rXT7uV3Uvy9xYZS36XuPh1Y0xXwKP4VCXPOeT+3fYJtyNq+3ZdzK2ePxxtjezjbsnJz4SRcASuobAoobStxpI2NOa3M7J4/tsv92IIA79nL8ko+p3WgaaFUblRK2DFuNpQGKmGIj10u+968tXDWZXa/JFh374yP6Gs31pK1LGMVmvUACrjfKObF4T0zQSlHZD83uM/rnjqKSFD+m8AgmJst75d8W6az34KYBpetGyTXzRXIzYxNJLR8Aa2zpLeCD6oz4Aq/1430/On58GkgvgjRB9jZwtEtOc/bSuIv2rsQQYlZwtjkc0tP7gGh+1eTseuy7ekxI+6ru7Zh5OQB8dLm9Lx6sL2ot7PjZTqw5BnPwFPevIiHja4dIhjC8W7L2nUk6fAqn02m4/s6o4kS1kKfGd7/PsHdgNL649ToiuQ5bvZ7F3JlCWlavSCpFLtP1TaTtu3VLOf8tyft04z6ifDdrIOM7GABy9qKL2HWXe4dQw+5aeqzBPFJik+5qr9WjgNnHdoHF7gUZ26lZMkbZ0HmydYOJiETT8s8/ZmhDGIj/LNqATEv4d84F09ZdMIBMWPpejtFP9c70uaYFJw9cClcDwPre1pSZkt5NRhctK3w7KTb7MqKt1Ld0ddufFbXyeufFgE+0BMjp4FpfZtWAGzgpPzeZ5XPO8huz5iHmIPQJXhHT0Vkq3ha3o2BHmzLchdNj8sQxRE7f28tAjQmYfI9BwB3bIR/Ohvwzzhj8EyuS4htCukHpc74cZrMzh8IZy6ccwWdfFIEGMUD9pjiOzMKplQAkPO733b5IJfnvJ40fd53j0nMer2bxXYw2K26cl7TuM7TW2KBRjenE23KcO+bxpJbGhafjRwCDH/XZj+d242h/4YAfMLNMwz2295x0cMH2Cxdf9cp3TmY9HBvOskKw/6/bYNK/dyx1ddanD6tdAV+dEhXuaMym3uU5sa9nu7Mtxj13giZ23Lq/T3YPrrRVvLjzE96Y4I+2zlbtk0yqIzKtOqTCCeSsU/Wsyszk+wXt6GdIwU0lC6k0sXhtTbq9PuthlBUkhHtXBqTquNWm2UZFc6z6/0ppwR+EWOC2LeXglv/vpafsb33ER4GQZnnCf4MgL+uql8nL3zl5tkA8XC5XBY/02jxfKAWoLvBxO76BcPK1enDLq+zwKcpx0kl16xHTEUo63ug8Bx14CGdjuo2fbjTFmesUnXPUdnPp4o4Mv20jBO/I38vU70WL6a2m6znadi2v1N1vqQeQLxzg2Vo7fBcVvKwQRTg3vPrMQkp0QRq3DfjDah6e/s8gjm6dlzfCvQoEC4yUb8edEagz3BGmy/sfK7n+nXOZrcyEsd8G6D5nFWKfwvAz9Lv78Z85eavBfDn7Pft0MCNQ9B3/jsOyHHY0e1Hucdp/HsWU/PfOeT52hkA3sqT77scRUasabnsNb9N/iaPyBY/uIITHHq+G2W7NYC3AHbDH7jrs1W022mfHaYDbcWWJqc76mNjBw3uezLfF2GXZldPk7VH07Uvtmxqo7dQ0lE9n8rXseHSPDb1P6sNlvaS6A/glp7UMZJpvCwt/aR0fZVL1zuvBMKHQFBEvhrAvwjg++jyN2K+ahP2+dsfyeue72yMMY/G94G9iXPv79EQDeppsW/4W3VR5BmIk1EeOMSA+5LgzaX2PM5CV1m5oWi7tH1kxdTI23b4goGVZNmXPWK3yiDg9/cY97+1/iRNG1RX6/utewN1mC05SrbWMq3z+3etyf2wghdtbH5QdbpOn86/S5j1UwW
uOnD6jufWpvFVjE1iLK8vnfEk+/FI4Ip7m7iBxcCimSED9cvCAMUBEOZW1PLfzHtul/E/Ly8+3wITfHQ6/B8B+HcBfDFde/YrN88CM6V8vSW2TMY/dyztFlvb5ZOUOq8L3NdxZxqz5OWsTaBHPWG6vnWNU98OvoF1pqBpSlZkSRM6f6ocSjRKu0bNtA6GPqteSy5liBiAmklNYEwAdBZT09aX6gxVKD3pwItj4w5YyNHYhWrkzYPFmWCJdyfc8jmXa3iQZxW0yHR301uEMTR9jgUA6SsD1WxMhG98U+U58xLyITa2vnEfSEi9CrDqn/QoUaViVDwKT89ADz3IqgavhcFHXrT0dQB+UVX/0ksKEJFPi8jPiMjP/P1/8NnTeIt1xW2o6Fawp9993/12Jtgu3UmzyuFpztkonY+3lfm8HNdjJjOsY8EmNnXd1a203Y7VtLLrl418JhBPezrQ7GS5NRV9DqPn+CXdHZL1aBk7OU/9Vc8Tu+hNNrx/vzO8qb2dAR6t7atst8YVnTD+oJvg3hYVCRaY03KJlKyfuv0emiiASt+joDQw7op6MzzCBH8zgG8QkX8BwOcD+BIR+UG84JWbv+LLv3TbYmzxb1p7ub0SXPLSOwsiHh/7NtyB1S1WOXRAxmMMg/POV10OY6ItTLO4Lnq038EmdD2aPuu6f5463BDDNqKMnJjcCtzG0pGZ8ladpwc9Cjo3+4515SRdfIafaQXJ54Lsjj0+a5HkRnDS6kxtkurH3BEAgMNP45vuGAB+NOMiMxojrxnmbIbTDACimjuiNoZ/lY3q5aQ8Zlyeh7+rG5kXgLlJ3M6U3OqC0ufrp8N3maCq/nuq+tWq+rUAvg3Af6eq/wpe9MrNkm/5tB8LPe7hOU74uwrazGKZPqAO4jUvpmJr3BVEJi3hetf4G5lVydit/hkalsAZaN1pg4WdepNo1us0rWrJnv1IS/1a3FPGfAJW91S9t58P0l7mjJuf5/3rlWp/JIxmpV/gm+JMk5UHuNxhsZHLxg8bC3Okx1HfE9OmLT//jg3QJwTdYoKej7cNNyAPPF3+Slu0OBoMsEhxu5HuhNfsE/wePPOVmx66xfZr9qX+xvnU44zxPC+oWao9qO5YJss0dVaB69WejUzlc49JcfQ64MXUpMldxKgAECrk5hV2Kj7WNPG14fDik1HFsM3a/uIhV8KrNh9tC+Oq8+2AlXOUvJlp3gI9YLINBhPWiV3dl3oAQT1m/ziaeLvTlItk7X2gOoiBV1MDejeGi3FjUr/cfWwXg9rp2pscqVDPy1+ROgHsiJIV9n4PVVtMsd7cvJzIAT0A0PWUxmfoB7xPHzQiQNfygoe1S71/cguQlAQ0qpphfWl4Fgiq6k8B+Cn7/qJXbm7yLN+3jOgdhbB+8cnTzpNBa+H7/viPvUvRXIoyCGMAKQ9hZ4m1HdnBTTeaM9E/Vv+q94UqWu4Zhr2oqWfLefAx7G+jTyc2nbOTkHW5TwyddY6MU7m2aaqIIRz//BVGzK7i2kIAdEkU5/Y126LYPTWEOCSBWRy3wegzk50RaboGWafjOT2ubZl6U5Lb71mPgnHSqqYozC4l6puxs73OyMlLwufWY3NAMKRb091+78y63ne8P6/xXrSv7R2HZMOpIGWLBKnbD/zJ//Ktls0At3NRrMB6Oz9lxre5/xwf3COukQ76RQ4580c5U85BPv2Pz/NOcb8lbAkgG78wiNlhMjuhsTA0faB+PV76Pq7QMTDG9e6JPZ7fLvCWtceZYDXWswDEiT63w/QZ1in0ql/8+dLwuQeCFh6tWHGEb5T3pcDlg7orx3d969dD5FiAlxXh3GeJh3B3PYPkYamDvYRP6B0Sap7m9i0xft8/HwavmWBhe2f96+GP/eh/8er6hAzUj7qjuajNOuV5fjnSp/YS/2zKs6d4FdD58ujiLvC0sbjiOhkgOG6CoL+H+hbwDx21T22f6f2+bfft5WYS9V2ZZNRa2V21OX/yLcwuPug
rN/l7sIn5I5yyZ36Rnseu85ZpCBArqLvg0wV+AV3m0T8z317mLRZ73mU+qT1nsmtZIAXJ3Os+LuA7v/nrwpDGezX8CDHNPC7xkm0vbxgrn3/f/6M/Xsr/Iz/6E6e1uRf+yI/8+HLN+yinTq2+SD7wDrF9XRzYltfmfiFbWwACqsrwFNsa1/XSlL/5wGoZkaeBkr8m1R+j87VOB7wxxlztJ5/gTj8/84Nrf7wmeFnV/SDF/9dnYw6K3Wh2A8jhbczN3vOhqn03+wpkfgpu3DtVwp71481xBqx+N+CuAeAZy2EZbk7B4693ZMKfSJ3CnufkZdac0H8yYpBvxg9RYUlGuKO9lA7L7zb84R/+0y9O+x3f+nUANq6SqI8xpOXT7lngKfldnYrbe2hezV7TZWlfJLusl/xHf+Qnb8vyORbCC7m4GmosAFHvM1LHY626em6fiPNo+KDvHX40zjr1OPfX9PRlylAy7dMQZiIr64vlfs6zyfQQEO9oDLMDrANgV2YFwk19l9ibPYKkhO6dSgBEHeQKfNc3f32wlV4W158Pzu3G4vt/5O1NW3dhYRAMcFTf6qCvsxHnVffyL9ehnO2SJhdBqnEvFgo29Va+/m7D7/qXf2u4d0QEx+Vy178b15C+R2DVw9v+QhRdKpef4ft9G+Fz1ifogX0yZ+zr4bzQp8OVAQAoLyx6xK94q7Ne25G7+la/4+10qyIDYod1Ple2zPP2fSCP2d+V8fu+7RtL+btdXvddCtVB/5qwH/B4GIO207M+dQUZ71Mz5+m0PBr2Hd/yzyc7PHGPZB0Err3KR05Di0+wb0ubU+X65MlDde+/iZgwAHZfZIhqbphdW79PIPxgIHiLyZXKmxNh1xz3VrTu5n0Wh1jTLSA8ZQVkyR/pR+YC8bsBYAXuyioeYX+53SCp6DpR24TuvN+Vs2HnycB75s+bvvhusVrgVoqaKljG6nLx76WcM12kafPOeNbyzw3To4G9okKgf68fnCzUFyLpNk0xqk3+2+NjnUmUVm/AxSyQjajPOvK6xL1M+xgQcvqXhg8Cgrs5fl9JDKVUnRtk7+S3U+LttLLF6HEeb862O0+Zqbhcj2XO3Xg2FHdT0FJvoQFrRuPQI5jv3EaRLFdb7vnJ21MTbFcg2m0rOQIwarX9bXsbltHs3fzkeFOK2AZCQKRF8n1ju7P9VljZIPWj/9MNsTuxxPvEF/O6BL38G7IoErg02S5U89GuMxcQfUs2Os8lnIBoeQOYB1cgFdQ/NjOPzHZDAvrvNhsIJqh5NiIDYKt4Keuslcr1ja69JHyAk6X19PfOctAS6Gl+t3yE3fNydr9euweFdYBqHyDYD8vdTOiRTtSmpEBuG+D683+APxjgTDAZ6rriKeHfKU9I2Jgu7ysmUFgFRZJUChNkjuVa2RFQ4q6mSSTrXxnI2tLOAtmP28u9VV6AmU/pFs4TYqHOndcOZj9rJFlzyvR0k1dPZ9+Q5m385nElgJ8Y5NoU9R7p2PmsR9Y8drWgqfDc65etdz622hgP255k6CzZa8PnvE8QwCkA3k1maXcdG6uAN6xOxOMVKR9YphPhszybst+oRjLGqgAFuBuKa4szr5WNNRErRVO0Isr0qjDybU6r1OoFRHZCg4+u2edusaT/5vjLYk+Avb/MoiMtf+Hny+/rznNcHbu0i/umx2HDYr/XWcJa3sy7AlD0Y58GEuYlItr0WIQs2QbwDXsXV0aNWJr9kSHZXSK3FvAqkaG68c+05JwSOy19TviHAwRfEDq76CF9KM/IczOwxsys5n2SntV290pGVzTBRsmmliZgeDklnv/IJ4odALfoSZcrENXIc+N1DpBg7JxG9+3sAHixVUd//nQXzz8XdiaAjPP3IIe04i8Eevw4qLMtW909c2uYBRBuGOyOGbL8uyGd7D33HPL499713Pn5bnEGHC8X4/48OS/FxsF2nCy/nktInMnh5B3IdnuZ0TUgJGEqk+cILwvvHwRvOHjZiQ8
IVfARGv5IQ0Qr0iTxJB0B0llMbZ/px2nxpKt2xs3i8n0NTjS7PDF1OcmfZbj1botihfV8ChQyh1AMjidMTNZrurvPoBkf+bu66PqUuc3iSE/K5pbAJdmUu07R/Ckbbo/41ExRT2iZmVYXAwFrbaWFwbOsaPe8fNUqKb/mU5VffmQ56s7lkcVse9r1nX3xi8S777uxbH1hTJZx6oy3ubTsh5aN9Lop9oUTxQgfhgna1MhPMAZqp6feVkX0G2whI2xfVO1TtwS/RCndvtqVn9wMRZLsmgAwy3Jogho4DoU4vUPNUs/Ma7luCYssVQ1568QiE9V4tTHJSEIU83VVXrLELvkC9OpLcUW3FCLpQ2ylKzR8WrPq0mzb5DYJKCRIpMm0nTUcqE+6xLxNABxiH/MU6/BmWEMplXsYhA6hRQk/JVxHxCvGy+T31twR7i0Q2gJI+FhJdN76M5n0BQ7wcZ/q66RcBbHm4WSKmawip9fVADuwuyFhoQmAbzLd2uvxaCkJ6v7BPSHskLsZzzHjeBknPQsfhgmCLCwMADZsr6wQxr0ExFw8iZxKeuV/1UBPa2Mv/g9Je8TyMtuL346pG2bb4cP/DSlp+sSyeAdzTs52phuI1KSBZv5YlbUwA81Bxv9yn1RyybG4jFUVtaXhflIDprqqzRPEzK9uU+q1KUOzXu8D3JB5YXbCC0G1HqyLyQSlSLX2eUVu5zJLjIYZ2zcHk2VjF0x5sJLqyZ85La9GWV2JAkAblNPwqzOLLtw+LMMo3Eb2FFKZRTStKQRm3dSf+We817I/Dg+BoIj8NQB/F8AVwJOq/kZ54Ss3Lb/5Sd9vxKZG4bhavvaJ6ATWygR35a1O+lpM38YTzuqNpCsDSrHnANjUVbEMA6GRcuaNOtVN2gfYFS7s9i0HOCegstaBSsal+HKSfWRUpY26WNJxvJNKzT/RBI3wTd7XId521X19a93X1er43BoDi+f/dlnITvsd9umpSJ7dR3mQixACAAAgAElEQVRpAe0O9V7WpIDepysNqHXYtfndcKNtd323rLQ7EAqgcfK6bmjcPRw4b/XXhue8cvOfUtVfr6q/0X4//5WbYAAU5+E3ndg5aG8trtskQevfMOrv7Xev65ntbbfr9O+7tEsFrI5H7siPv6V0LneFnofKA1thb7VzZbm5gNCSng2eZRGhtZeD4O6e12XLpgnoiu8k0twAswdk3snroRvMU93jtBvGusuX8y+fQAG0ygCJOHDOrEuS7yNUvo/bC0W32qEKn+nP2m9ZvJKUobNllJgu520R3kV4zXT4GwH8Fvv+A5iHrf6BF+VkJmzbSdSjSft3DMk7uXUC2cfoghMmFDHLbGBPz2c+mZ8PyN19yBG+M2ZCENhmPmeXbS+Y29FoHy6DQc5lzTjh5/E4Qql8auE+FhA2O0cp42yyIF4FdTb2yAosDxSfGtU+aAyewUfIB0wsd2G31CzLPZO1zKhyjjjrs6vLDVCImMSsSuoohxZgiE2W/Dd6eA8NbgFslC+ed3gRMy4zQnKVeL+ig2bcvM3e66o+1deUzM0eeYNSdgHaOUIo8yStd2aSdfnkueFREFQA/63MB0//M50vT3prr9wEkD24rVFdCOkguDQ8+ibcmT4PHs90O5/g/HRX955JhYGzMJozjweJn3RRtmAEsxnhxxoOSKjgJpShD7cO9VF1L0fXjcgliXocBB769oqdD459irtFnAW4FjeDs8W87wDnDJjjrttVep5T/QUS8i+Bxh+DUnzy6CIXzU2Wvfk9or4SZe2YlbZ0gcrryK4+3BZiOt3auZhDrlvJez17k2ONW3rjEbXakF1gn2+YdxO86kJmWzW+ynXC1ePba8KjIPibVfXnDej+rIj85UcLEJFPA/g0AHzhF35BUf4z4WvjJriwg12x+uqSeVXoysZuT0RsezE26PjPKQINzN2Au2e9H9qztonbwSDAR/dxPF6dUq0h6uNAeKPcki8qiBX5W3+sGSCOjs8
BmAat578ru84+V+NyNnxVlfSBygsKUmWIwjZsjftagXg3iigNZd0sgnX64xlgBd9exi5Or3vJU5g4VBbo6Qojv9Hfs+y93A8FYoQLGZ6ichd4gRs59oTktXPoh0BQVX/ePn9RRH4MwG/CS165+RVfpn7CCDCrs5wHpn4n0p+CoKqWJxFgcyHeTACAyuOWzuOvdOkVBhyNQfu6oO2btl5HkbV/r/WYG2SdVZ3lsQNVD+UU4QLwjwH5DqhW9ob6m0ZxZ4KcZudkX+4BkOMxl/aZv3F+sX9EwLrpcebrR88MZoKVL26o1n716/ESKQbKXV5w/ZXluqe8jz/MQq19I7skC8k2iVUVN0WX7+VjIPNdRMXSIoLYyuW/32V45OXrv1xEvti/A/jnAPwveMErN3fO1PL7tHd5S8eeIcT3hy3U3vLxHq26CLJa0/s5VjbqvMfr03PcAaDQf90SCn3u/nZ5bsu4wwJKfSgP/75zup9dL8YgG6UwPaVrXVdaFlQgWgds+rDl57E0o2/jgHRvpweTce/EScDmzyzVfLeyb6/oeenp17bfM8LZmEqN6rLWtFi+bzLbXHt7oTZ3K0t5/DReK6+X7BEm+KsB/Jg1zBsAf1xV/2sR+Wm84JWb96ZUj4WqVqrOLBpTmHcjegBEzlgWuaZlas0qI7ilW2K2zijfKxBriaRLmpuOlV3Q/knpH5xyd3/evgBnmpzysdDZxO0+zmlbmfUA4Cl/EhveWVdBZpd3v7t36s9e1UHxpRntGHpyWmAaHhfNQEVb12iN39BtcWMokkTeakmeak4RNrsQhRhdm1ns/N/cN6Ppb1+Uq+luje3d6LkVVjMC4NZDUc8Kd0FQVf8qgH9ic/2tvXKzO4Fv1y0ZUkZ0dtXy9ehdCc/8Rjy8eEpTFHKzfnVrunUjzi7/m4EUUihP9TxOZC5l0X0llOs+N/ehqnqqzartjen8rUGwb69Wz830qczw/IszgT7Wd37Ss/JVaeGZjSEqCEoItxZoIgesuN5lVbLPSLQCnH7hzG+4YYStGgSEBuyUBYNuXbiwKSHrl9Ajb6hjg9PFav+JTCnbGX3n6p7H6eVyitd6qj7sUVqSu+8lzOdjbOZhK2BZRofKrkP2Sv1Q9g+A3W56fQpSd3p0l5f/vuWTc9BSavPzPKtBEWu4lzN3khPP4ZSUrsk43RbDDJwPplr/e9P8M6bKhvCW8ToLBWwae+vGqiaU7Yh+rnYG/DkoP6sKgj4Uw3/bdSrEdoN6H5DSD9/j9nnWWtbLRuj98GFPkWmD8uEB9gzilEpw9uzFy8MZIJ3d69+7f+255b4krjbtW8DFHXIljY/PtzP/eIzxc/n7tuTrz21HTl9mIpqacuaHPgu8ICXA8mx6zk4eY+0lb/ReuRdmP0rQ0eeklQDwW+mWWzcZHUXb1pdreM6230X48EdptSmZLH6MpMzT2OTuvZwMboJIHE+e+bRtA3K2BudX+pRs96jXjBky3wC9Kt46XctrkgyWRNouElB+98DRGXSNpjnw1cvrc0uXKduM1bXMobnON+5lBP5poITNFhn2VVLPc691YxpuA382leoX26xgvl5teZOroYcFPP03XfOythjywMxgrf/5vV3e7aL9m57taCdVh8rm4TtHvwByrG1eKSSlWdqKS2OzSKN7N7NZpyivDh8UBLeKG41Awd0XRf35cw0iuUm5ripSGmOiAOzgSS+s7ydEDhSXpYmYgysTnXXR83yAvHCwB8GzRabt1O9sP0ABwgawZnwmuzl/BvdmrTiODb6z+I8wr84ZgHoEf7pXqmTh39NYp8fhwLABwFuS7FYx7cZpOmeDnHbQAD+bmj9UPnIqnCOF6tSO2ipAL04v3PomWHroqrNMh0mC2+FWHAZA3d3JcBurHw6fM6/c3A3ixcfVk94fK5kPdVJlFWt27RF6Kp8VaFOH3e8TvxNfY59LsM+e18mUkAf9vQWaW+6oXd4uY9Sj+RP7dPShaToB4Ev8bVu
5QYywt4nLHnH3fr5bDJAN2005fAqJ87qV9vM87/XhrTK5fxe5CQjbVDXSOdjoRu+4f9eCT5nYMgELCr6vUK5WdyDdrG4XJmj/vFKPPggTZN/Ji9LDmsid/RuAOETKLnhnINM94vbyAdYRjIEvPeiDujdodqADQX8fx66OLwEQJSb86OppMJfNPZbtbJHhVJYUaAmcg8pu4wZulpVtFCXdDH7CzRKz5P9AexOLXoDOs0SdwisQG6l1wyBvteduytyZ4KDriM9OoXoL2+xDs39FBJfnrLjnjWcO9N7mqX199jHPLHy9If1gTFA6QL0ybAfnjXLmm9ES3m71oa9EV38THyiwSdfKvOXK4MGqgE3PWl1ugFSheDzCeqCZxpnSLteD9WoMil38R9jS4k/dxekNtZtaxpgleL7DwB4J6mXGhQ4yN+qGtT4py0lbW8qVe2aurl/dRbRbtAkZoklMv6XCXoFAKrbIoab7NFV3ec/qtG37LtSm3Myy5722ahkLPn5f6Rv84Eywi+/XhG5mnambGNy6728TOihZQggO62Dd9tVO7tPMl5q4IlYV7l7PmJGcDAdmoud+IMMMSlXrzBZ1Xw3ZbNEQIPbQKch/dSLPLQhyw3HLIxTsCGnIgPRHefpdeWvb5HDfttudwVOZ2Tmj8bwPmadTd2CqEnUjsGjDNmEHYF7s2ef9qDHQMHLzw/PtWoOGnpTDyeykuqFY0FXHqkS7e1MSd3FMXXo7BOqD+gS7QgOs7HmtKLr/Ppl+nTGVnQwCTKUVgaqfd7fGfcTQ7AE4E7IV5adPVrtafTNn0/1eF7f27Af6/9u7upjrjqr8rP22opSQUhFTKUlL0qDYiBAuqBpjwJ9CCMbEixpJqnhpsBoTpfbKOxKNsRf+xKCQaIMXFbVpIkjQ6ypEQirlExRjPwVbbzTRC/Dby4uZNetZa2bv97w/nnPevHu173fO2Xv2zJqZtZ71M7P3jiF25DUvrLCHLtkb0nUQZjrtfFusWq+kAS6rEC92gY6Xz247byi0tKDURSbJyx4FynlhKJ4cGylmpofqBIRi/NT2VwYrgmEEnGDY12RYtck/M+GPYkMDn4i9cQzGc1/ziYMII4boa1QBsDMwF7mb2WmvIJgBcERlvpov0M3/arSXQtVdPESuVytYxJxfOTsKIUeLOeOaI3HSvgmCuXHVG+uso64sPjQgrIKSyjZA1JVxX6v/HLQG2DsB6sjDVG2PxN8tAqpqQumMXSl438RTHqNupTy0jkHug76SIfeFCrgc1DZP4zrwkwFJKshWYVPtoWTUF4tMuu1GO3uYqbM90zQ2rtVhjrJBSWmB3UF0nQ4aDq9RAxgekxUl5RVB8xh4i0xb4VrISbFX2nkHkksP+Fzp41mUT0Ta01Gsbkvcj6g9RWcwpHxtXriYBk9gye1Er/AybG4PtrukMOy7ApB5BkQwTdK9sCdvFVpaUjnNeCn9ZW5G13J//DUCxZhNJFdd/rYSpxha2drPVWOSx3BQ1nKKZgTLPy3e6tNROW8bQGl0xQKdQ1xCDjLPgNg/0cE5EyYv0AEXRsAmNpcqR5V8pVq+kyFyDYNwSpxQy0+sCf/yFh33T0ejPsyFoBfWfI2D7Ji3te0vTSZSKKXo+71mOJb6MCjM3Nv/8Vhm8CICOgALa6UZs9M8/eoENqcog+Ru7qT3gzzKbq6MpxVqObIdPG7zfK09ayRvJF/PyebIpXc0reJeZlI1oogPr7A68u8xRV1w+ZmSo9PK2PtTKHWVG7mkoGX/IDj5e8FL5NcpU6KIge06O7rkqEwQyASISnmCPbS9MaxdR8ZxEn+yirdD4ZsyD2PQy7/Z68p5OCPbAuIhbQ9MY6/JwxYTUFf64vVan9im8nAFiF9VTA4pje9uGHpTNihjVnxkIJaMSWhDfQZumedq4z3k32WEDcouqZkRT6tExqz9LZTheZZw2hP/gY/0ma/L5/pCPvO+UYwcEvCcAJgqUJnwKxoAhvyp9Hy
cSlUg3ZBXzswTbLpSvGGZpHdy7NJdDdkK7TcniNTxataXtvpw3sysl23bzIKz1mgzWOTOh/yhV7RQhVtAW9QYidxpinVqTq/SGjCA2naIZkSPQNiUqutT/G5AmPm161urYwfN62el035IlwzGWk6yK5MzU+p7GXdRidE8rXvFp4MzndyFgdWoZBXcZHRwsaFYsA1QXlLIc4LO6zMwdCPCfIzHyxZ3Okq23DzKhrfVW2w6x67rYDB29ugX6OB3jAygIpTjsQ5CLuPNpSsNdxkiEQCDdNtoSB1Msk+1rNhjNk73QLLSL/HlzmMPouyFKMRXEUFKTh5JtqoBHEZ6RL8XqSlcz9uILroo04EHKejqLoGVNMSogdF4x0JxA/nomtHxs3mdDg6rqQ8FhuvRoX+9TFvU6t7grjSIVqjqiq8UDUZX0G9h4F0RVOQSPL9Mu753+E4AHwLwAAr77wNwA+d877DX2yDPjnCbsSwICA0kRMo2ioVZiklyaoXc/sGNOT2f9KWAwG4gd5EyVm5NkZaqMeCy9LdO7kEDZVzaIkgFQgPBsQeLEH3zlwVbT0FX4muvFPd+rgHh6Div4lt9630wAKzWVRW30MudDMY5eP+rffKWbPDXcsAd/KlCMYeIqF3bykgDv0tZDU5OZw6/Y9kZvF2nGBM0Xvv2Li5Tu3qCTwD4uKr+uIh8A4CXA/gVlPcOf1BEPoDy3uH1V24KcMKrkoO8gBUcHZ/puMDezuYueh/yjb3KzhtjIDDWCPlMmWfxh0+2TdDZLVqcJ6pf/FcIratll+5SAnJbLJK1Bv1w3lpsyXEL8Us1sf8jkLeW2lN8RD2fK+NrrJ1wrPM2tTmkarxwDa2fXqbL7QZTmtxW+zpYWFpcIFG620jrwlNTXG8phHBS5sZTJ71gcCqQFymco8Q0nfFbQPuN9q3eKpehP0kvOneDncC2EMmgSL899GgXW/trlIasOTPxcL/TU+NlAyM23rp2VjoVBEXklQC+H8BPoTDyNQBfE5Ezv3dYILhtOllvj753XXbXsfxWhcxlpWXOK1epLlNIF/5qAXWKg0hKN9t14kqThZZD1y6Zra4OIiQ7BJ5NiVQNXiA4aS1o+rQxCGpPiwVMUy/GNczw1zbG3vTAlW+WIhhsoHSqJ72QH61fIHUTm8w0gwXtQJ11PgSYytJXqIvnIOazeqgRkWLQ7NrE2+TuR/mYbM+h3/FiD3Wd5VYFImkh6qwmPYpwO5h6f/iBC44USTbaacuPk0Fs/FrUA1ocqmFlFyKTj853btDAs2w1IywDM1eRd31RbY2KJPn91uiEyQ+5Rxi3e017yQm+HsBLAD4sIm8C8BkAj2LH9w4LvXLzFXd80+m5kZXBlEk6hWsKNPBGTFDaIkECwXJ9sshUhfms7S6FqI/GwAK3Giy/F0/eRy008lTMG+EcHws+rG9WJnfArqdxiG1ze9FsOPB63wsuudd4FtFbBkLvvymc7W3jfvZ1WG8XuOC+5bSC/REACtABYeAV6e6S6jXxbZFm7JS9R3JnNAtZcBi9bveuY3cCRxQdtLSPtduiAcFICIXbD75WLztrv3nuvA9JQyU1z9gr0Vi1KnjAzipo56BdQPA2AG8B8H5VfVZEnkAJfXcipVduvubVd+ntt9++WDZM+lJeQ6qtqsIWngDCoZZX5Mc0jacCKvGeyf5fA8oeAU2G2vXqTr3q8l0NSwsZnpsp57M3Eq+NHppSXMJgZ8oRPEmg7lXyOnkMcpu5DJcdqEX0gGtdrCbRuxDYm4hmvj4bhWQ8zvrwELtugo8LG6Jdq2M+mnFSaQ5eGGPQ/IQc17i1YODIu13khXiKi2EkIwUlCVQJhSROeazb+7nmaeXw2xajzM9YNmIr7dq16bo+j7nO2660CwjeBHBTVZ+tv59CAcGd3jvMJCLDOxXsHIBOkXv7VOcwWXcGQbeKCAKeQbBZbEQhNQtPDuAgH+KuugiAuYQ/82i5edDP6AjGu0KyKPfXD0a
lJdY0jEP2OKmiTqGH+pC9Hwm+xnpfQc4QeS4Ogtxfqlt1KNy7AGHmbTUUPwPJYLxGhmuk9NYnAN0dPMx3qxs0RkSaymZe7MIo49EYW0jP9jUDIYPOYt6UfOC4/9KZDN9D/anjccI6uY9Ghe8M68+fh3Z529xXReQFEXmDqt5AecPc5+vfIwA+iB3fOwwAJycxJ9jYTyBY2w7pUg/NpD4mXjFNrGRxg6rmW8asPS3epCji43IJCGK7zmwDvVbOAVhVcSIxx7FoaZMXopM92qtxWSuwVlAFK054ywGpUrsc9vozF1mx7Na8XMauN1aCQjJQrnhP9uAD6klL7Dd+q0PiZVxrJgIX70vrcQAXpmaU4DLRASe1ZwDg2zKWwZ37YkaxtWcvsic+c3sNwKapACEZnsCj8ZwMPXvXBhSWlyVrMn6lRO7MSpTCV3bHhugV0zFNU8W9T567dq7+tmiq8V+z1SL9g1y9mI3XhGnxUem7066rw+8H8KSUleF/AvDTKPBxpvcOZ0+QAWtEppiKmU1lG9Vy3gWo83ymqNxsT8r8SQPFrm2geHUNPBwYu3AWgE7a7vfkaCQnc3sfzvg165wR1hQ03TPJJVtGOe+t8rJhu5BIDYfjplkbq3meHVADqz7uS1q0tLdRw4G49dzg4kSiQWGeWogVWVlELRurafLEuY9XrCeDJ/fF6uLrXRRHG9GXt2U1D6aC5pzk30AQIrAbo4PXGXgQt9+xkmB5zXOiAl1Hs//RIHYNKQXI2uuGz+SDoikh82NylCo0QHcAzJMbfeFJpo6H89BOIKiqnwXw1sGpM793OIfDS0CoPiPBA7RrTFFu0Q3r/KfpYQDuQhPYCDDRal3IbUA93DKFlCxMDhQ6z8WbE1cs2xVagLwIZ0y7ubKoAXMDxToGth1k4GGYwEsbrznUvxTmAGieoAGholf6uQFwG5ShEWAK9fDxxiuNGZVp/RH3zhgAWQEs9JPwXDrFPKdyMHkj76p51wjg0FZfaxltxxIQygI4iC3OpcPcBoeuSO/scLRonzZS5mNxCGs8B15SAVUtQOFdRhngAaOI8tKWerjo8DJC1M5IscfpvE5T0UG7nbW2TmNo7WYQnFtdJrd7A8FLI/IEw4pn9qxSviULiAmiPX0jhxTN2s4zLN8mKIrf2lOp1jRaJzNYRQGd15x/bDhagXC2dupE1obKudmfVahzEMfIU63cvFudPSwFKaPQZ+AroV4LgwdhofPgFlqJX/sMOayqP6fRKJnv7bpqZxm3sah7dVE2zS7l0AQTpnDOxpk9jpMaLlndXiY98SVW3vo76zx0NvN4qipOpqkqeNqMTrwHzzQBKsN9DINLn0JgK/6UJKFjAdibx8V+1fokxpXefDIebl/pvnUD7R7Tm1KQI6J1r6/XNTUPeMAC7Q/d/cXjp9PB3ja35k20rgp1OpytVE232QXQZyiDXE58dlbCKfN8crtLnHuydgA0JgzR/EdBbmPSTD1adUngJyoDs6iDMbV6OdmdTvoxsWrKl+Jwx/L9jHCb5jFLrzBpLl25vZpJXJlKzneqkT6VbKxOFQQtJVKBgvZfSAMKobnxh1l4VJLnmOpUcfBABC6OPG2s3NupOzHnepLaKIqe8tc+/OWvVl6W22yhTkLhsIAB8tLJcPKzrs2W+psVeX5KX5b0kmyLt9vkDmGM+Curg+ef61zUjZKeEmEditVZPxtkruDHWWnPIKi4BYVlo8ZF+lDKFacO9oy2CsvL5oB7WqoKbRHx5JpFs8JtzCmsdn6EwjPiphNIfumLNuW18lK03KHaBCNspC31trsDmvAUvpTGB7NWcBB0KzxtC4RSJ6uatNCelZpKCDDN2hSi7G13oDQ/jlMUBB1RuQi8ibnCfvMyFfYw2Olk8vGXMpfzfIs8I18VFJnIE5yrw2X9c3Nnz/Nzjw1etj2FhkCw2asCHbPeih4UGYLWIwLV+OTr0phQaG/AHOXfFmU4Q2oeYXAk4xjXKnJmrIC
ozZRtPDJefbysdBlTXiiLnqg1JkLb76VUZncwmefW1KKlMZzPGANVEaYxqgwsypPghIzNxRdEjA7iCXZ2NwPfIJwK15uwj6yBxgnM1jNfxgO9tCeL8Ky1Gzwh5dulQJ5JJFPGcaKen1TjF3TlW8hD1t2hlVnimogDhOvYAlfmjR2q25U2/EtA2rYbcZMdCJq31i/ymAJNk+d1i6GrnpvxO9UxAT8017w767z3jgHQ8n6QGha357o537lPLhzmhXhPStvayk+St4ApoOVmT16hn6YpDFONvBsQ2oC0YyyvVjgNM4NvXUYrQxG4RQ3VpXp+BmBCYagbC/4E3Ktu5XRuzE1tt0H58y1f7ilGjRAI5coDRLI8UbxgBo1TCpdBBwqHe3PatgB0FohAzcqpthdtN0AiC5ZzeOW0Jb55EmFzFIQ5D/Bst3O1hLmWZ5wBCJvVAqppqJM6Ay7F0uH7rEigR3UMiHN7VPtCWSoyqFY8NqoAjzC+zE8PGsOW6PeALxmPe1joMogYhPXFcSz9zxtCcqRQHRhviz1hCnPt6qmW1zz+yp4ayU5qKyyOqYYnnreqgkyYoTIvtzXnhhw+srYSm0faPUFpMosKfPn5lj7ODIJmVGpU0P5D0zstT+boUgQtcjFwM/taefMnaVd5IM845EtTn+yYgaAMxvI8dNBHaTWHicxOtwiRjhkAKtXnCeCRG0/lUvtSJaoTIPIgi+VMgiqAzBWwSCKXIKjVNwIJRddfVvYRT9wnt5hYxJhYNzE44DtxhxbnUJuj78wP2nXr1EL8lfMT7cGzOkPNqTnhe3PV64n1llpanpJAWLrKT2DZywyvIVSlsQ2AXnM4Njb8uP0wDiEaKmGxRy1VFs02wYCfBiCyRl4UrXoXVA6LQzwmeW55kcn0RMD6ZkwwiHada2PJp+Zq8CXJFoObhGqkK3MyTfGBLOekPYOgxE+SogZ0g2MjELzVVmLjyI+8QMBA0L3G6LT1AsxhwVytYti2Q9gQJl5cXWxlkyeQXx1p9cdwXJBfKN3nKQEkQc6e4FJo35PUJ/emeQh1CGxFZ7T6mL1ABkKZMk80j7Pt/6x329Cc5twU7/Noqpn7WPd9Rl6yUWss8IXkIdl8pkIdpvse1TgPyehlFgdVjecqe2Z+zGpaNTPiLRnY26u2yxPH3VMLeWEa92jUioHpPfbyfRQOs5EBItCPDIF9Lnl59msiHmY9ZRx2oAOEwz3LTSEIAH1biSul5RluzXMEQbisjZTRyvDmShfSZcApQrACggD6e7e0HetAEOjd906R+qRv531VEIxEixc7htDWvpycZkSKxc/nckjcgadIt5ePy3h9vg0mg2BTpIQeSgDTUh3ViHgbXt5zVP2ihNVtiutpiVAqGVvrzxjEbFuWgVUro+qvVMDpY8phewTUluHsqXmPtMlaBBBtMg3zbtVkPlXRxoHzvf3c+PXREzQPHjZ/XCfCVC4CYIZaofJmBNdeQrYr7T8cTrHKaBpHQpWVJ5TLXsPgO09qO7bAY7aCnaCb3ogrUDvRzvft5+8gReLKc+jLoLbMt+Ut+36cRjuVW/Esl8Y91+04wIq/7rGOACnzwimBXnby+KIGiYuz35oI07Qj7eTpceWDsh0Atgim/dO4Gvej9oGML9o3vr1tYGAH/I5lw3jsjzno+fEuV7vQ/+xpdqkM9ON7UZLdw6ZLaEzkJQD/DeA/9tbo5dGrsfG9T7qqfANXl/erzPcdqvot57l4ryAIACLyaVUd3YJ31LTxvV+6qnwDV5f368r35e043GijjTa6grSB4EYbbXSt6RAg+HsHaPMyaON7v3RV+QauLu/Xku+95wQ32mijjY6JtnB4o402uta0geBGG210rWmvICgiD4nIDRH5kpQXth8licjrROSvReR5Efl7EXm0Hr9LRD4pIl+sn686NK+ZRCZbabMAAAOtSURBVORERP5ORJ6pv4+eZwAQkTtF5CkR+UId9wevAu8i8gtVRp4TkY+
KyDceI98i8gci8qKIPEfHFvkUkceqnt4QkR85DNeLfP9alZPPicifisiddO7MfO8NBEXkBMBvAXgngDcC+AkReeO+2j8j/S+AX1TV7wDwNgA/W3n9AIBPqer9AD6FM7x6dI/0KIDn6fdV4BkAngDwcVX9dgBvQunDUfMuIq8F8HMA3qqqDwA4AfAwjpPvjwB4KB0b8lll/WEA31mv+e2qv4egj6Dn+5MAHlDV7wLwDwAeAy7Ad35Iwf/XH4AHAXyCfj8G4LF9tX9B3v8cwA8BuAHg7nrsbgA3Ds1b4vMeFGF+O4Bn6rGj5rny9UoAX0ZdqKPjR807gNcCeAHAXSi3oD4D4IePlW8A9wJ47rTxzboJ4BMAHjwWvtO5HwPw5EX43mc4bAJjdLMeO2oSkXsBvBnAswC+VVW/AgD18zWH42xIvwngl4Dw8uNj5xkAXg/gJQAfrqH8h0TkDhw576r6rwB+HeVti18B8J+q+pc4cr6Jlvi8Srr6PgB/Ub+fi+99guDynd5HSiLyCgB/AuDnVfW/Ds3PGonIuwG8qKqfOTQv56DbALwFwO+o6ptR7i8/hhBylWoO7UcB3Afg2wDcISLvPSxXl0JXQldF5HGU1NWTdmhQ7FS+9wmCNwG8jn7fA+Df9tj+mUhEbkcBwCdV9WP18L+LyN31/N0AXjwUfwP6XgDvEZF/BvDHAN4uIn+E4+bZ6CaAm6r6bP39FAooHjvvPwjgy6r6kqp+HcDHAHwPjp9voyU+j15XReQRAO8G8JNaY1+ck+99guDfArhfRO6T8hL3hwE8vcf2dyYpz+/5fQDPq+pv0KmnATxSvz+Ckis8ClLVx1T1HlW9F2Vs/0pV34sj5tlIVb8K4AUReUM99A4An8fx8/4vAN4mIi+vMvMOlAWdY+fbaInPpwE8LCIvE5H7ANwP4G8OwN+QROQhAL8M4D2q+j906nx87znB+S6U1Zx/BPD4oRKtO/D5fShu9OcAfLb+vQvAN6MsPHyxft51aF4X+P8B+MLIVeH5uwF8uo75nwF41VXgHcCvAvgCgOcA/CGAlx0j3wA+ipK3/DqKx/Qza3wCeLzq6Q0A7zwyvr+Ekvsz3fzdi/C93Ta30UYbXWva7hjZaKONrjVtILjRRhtda9pAcKONNrrWtIHgRhttdK1pA8GNNtroWtMGghtttNG1pg0EN9poo2tN/wdqa7VP/aMryQAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "def plot_img_bbox(img, target):\n", + " # plot the image and bboxes\n", + " # Bounding boxes are defined as follows: x-min y-min width height\n", + " fig, a = plt.subplots(1,1)\n", + " fig.set_size_inches(5,5)\n", + " a.imshow(img)\n", + " for box in (target['boxes'].cpu().numpy()):\n", + " print( box[0], box[1], box[2], box[3])\n", + " x, y, width, height = box[0], box[1], box[2]-box[0], box[3]-box[1]\n", + " rect = patches.Rectangle((x, y),\n", + " width, height,\n", + " linewidth = 2,\n", + " edgecolor = 'r',\n", + " facecolor = 'none')\n", + "\n", + " # Draw the bounding box on top of the image\n", + " a.add_patch(rect)\n", + " \n", + " plt.show()\n", + "plot_img_bbox(image_ori, probs)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.9.13 64-bit", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "orig_nbformat": 4, + "vscode": { + "interpreter": { + "hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/text_recognition.ipynb b/text_recognition.ipynb new file mode 100644 index 0000000..e9fd51f --- /dev/null +++ b/text_recognition.ipynb @@ -0,0 +1,282 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/opt/homebrew/lib/python3.9/site-packages/tqdm/auto.py:22: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], + "source": [ + "import os\n", + "import cv2\n", + "import string\n", + "import numpy as np\n", + "from PIL import Image\n", + "from glob import glob\n", + "import matplotlib.pyplot as plt\n", + "\n", + "import torch, torchvision\n", + "from torchvision import transforms\n", + "\n", + "import torch.nn as nn\n", + "import torch.optim as optim\n", + "from torch.utils.data import DataLoader\n", + "\n", + "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "LABEL = string.digits+string.ascii_uppercase\n", + "label_dict = {idx : label for idx, label in enumerate(LABEL)}\n", + "num_classes = len(label_dict)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Net(\n", + " (conv1): Sequential(\n", + " (0): Conv2d(3, 32, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (1): ReLU()\n", + " (2): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(32, 32, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", + " (4): ReLU()\n", + " (5): BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (6): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n", + " (7): Dropout(p=0.25, inplace=False)\n", + " )\n", + " (conv2): Sequential(\n", + " (0): Conv2d(32, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (1): ReLU()\n", + " (2): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): Conv2d(64, 64, kernel_size=(3, 3), stride=(2, 2), padding=(1, 1))\n", + " (4): ReLU()\n", + " (5): BatchNorm2d(64, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (6): 
MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n", + " (7): Dropout(p=0.25, inplace=False)\n", + " )\n", + " (conv3): Sequential(\n", + " (0): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))\n", + " (1): ReLU()\n", + " (2): BatchNorm2d(128, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", + " (3): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)\n", + " (4): Dropout(p=0.25, inplace=False)\n", + " )\n", + " (fc): Sequential(\n", + " (0): Linear(in_features=128, out_features=36, bias=True)\n", + " )\n", + ")" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "class Net(nn.Module):\n", + " def __init__(self):\n", + " super(Net, self).__init__()\n", + " \n", + " self.conv1 = nn.Sequential(\n", + " nn.Conv2d(3, 32, 3, padding=1),\n", + " nn.ReLU(),\n", + " nn.BatchNorm2d(32),\n", + " nn.Conv2d(32, 32, 3, stride=2, padding=1),\n", + " nn.ReLU(),\n", + " nn.BatchNorm2d(32),\n", + " nn.MaxPool2d(2, 2),\n", + " nn.Dropout(0.25)\n", + " )\n", + " \n", + " self.conv2 = nn.Sequential(\n", + " nn.Conv2d(32, 64, 3, padding=1),\n", + " nn.ReLU(),\n", + " nn.BatchNorm2d(64),\n", + " nn.Conv2d(64, 64, 3, stride=2, padding=1),\n", + " nn.ReLU(),\n", + " nn.BatchNorm2d(64),\n", + " nn.MaxPool2d(2, 2),\n", + " nn.Dropout(0.25)\n", + " )\n", + " \n", + " self.conv3 = nn.Sequential(\n", + " nn.Conv2d(64, 128, 3, padding=1),\n", + " nn.ReLU(),\n", + " nn.BatchNorm2d(128),\n", + " nn.MaxPool2d(2, 2),\n", + " nn.Dropout(0.25)\n", + " )\n", + " \n", + " self.fc = nn.Sequential(\n", + " nn.Linear(128, num_classes),\n", + " )\n", + " \n", + " def forward(self, x):\n", + " x = self.conv1(x)\n", + " x = self.conv2(x)\n", + " x = self.conv3(x)\n", + " \n", + " x = x.view(x.size(0), -1)\n", + " return self.fc(x)\n", + "\n", + "transforms_img = transforms.Compose([\n", + " transforms.Resize(size=(31,31)),\n", + " transforms.CenterCrop(size=31),\n", 
+ " transforms.ToTensor(),\n", + " transforms.Grayscale(3),\n", + " transforms.Normalize(mean=(0.5,), std=(0.5,))\n", + "\n", + " ])\n", + "\n", + "model = Net()\n", + "model.load_state_dict(torch.load('./models/text_recognition.ali', map_location=device))\n", + "model.to(device)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "tensor([[1.]]) tensor([[1]])\n", + "1\n", + "1 1.0 ./images/1_10043.jpg\n", + "tensor([[1.]]) tensor([[1]])\n", + "1\n", + "1 1.0 ./images/1_10060.jpg\n", + "tensor([[1.]]) tensor([[1]])\n", + "1\n", + "1 1.0 ./images/1_10059.jpg\n", + "tensor([[1.]]) tensor([[1]])\n", + "1\n", + "1 1.0 ./images/1_10104.jpg\n", + "tensor([[0.7827]]) tensor([[5]])\n", + "5\n", + "5 0.78 ./images/12022041104595530.jpg\n", + "tensor([[1.]]) tensor([[1]])\n", + "1\n", + "1 1.0 ./images/1_10029.jpg\n", + "tensor([[0.6902]]) tensor([[3]])\n", + "3\n", + "3 0.69 ./images/1.jpg\n" + ] + } + ], + "source": [ + "for i in glob('./images/1*.jpg'):\n", + " image = cv2.imread(i)\n", + " image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))\n", + " image = transforms_img(image)\n", + " image = image.view(1, 3, 31, 31).cuda() if torch.cuda.is_available() else image.view(1, 3, 31, 31)\n", + "\n", + " with torch.no_grad():\n", + " model.eval()\n", + " output = model(image)\n", + " \n", + " output = torch.nn.functional.log_softmax(output, dim=1)\n", + " output = torch.exp(output)\n", + " prob, top_class = torch.topk(output, k=1, dim=1) \n", + " print(prob, top_class)\n", + " res_label = label_dict[top_class.cpu().numpy()[0][0]]\n", + " print(LABEL[top_class.cpu().numpy()[0][0]])\n", + " res_prob = round((prob.cpu().numpy()[0][0]), 2)\n", + " print(res_label, res_prob, i)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([1, 3, 31, 31])" + ] + }, + "execution_count": 
7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "image.size()" + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAB8AAAAfCAIAAACQzIFuAAAChklEQVR4nJ1WUZbjIAwzNpDe/xY94QQCZD/UqA5JZt5bf7SUgizLBhPe77eIiMgYQ0T2w+Jhqoq/VBU/sbi1Ng7zCH7wWQ0IjGFmRnRO0tkYI8boHQDRD7A+Ap2bMcBnCEFVVTWEgGUhhH3fgdJ7771v24ZQWmv7vouzaGYAMrOcMwZADyEgIBhJkCNAa621VrjHZ2ut9y4iMaUE9Jwz0GHYOXHxEsEH/WEAdL8sikg6DMTNDOHDgZmZGTThTqhE6VQ1pYQ1sC86NsMzgiqllFJ+fn6QQK9PCAEMWDzwF0IAjy86BYXRc2utlLKua2tNnbE0QcvnwFfLSRlwIXegI12gNmXVF6uH885O6KpqZiy1J0bXk8VM0tkYY1Ym3JkX0TvD5BTQVK8iElmnCNarzGRyNSoE1DCJVMGfR/9wxxdoLsuSUuKp27ZtqjDsZ70CFIhIFT190uPvh6v5eLGNCfC4XAAlvug+3bszP48cTHBPx9hb5FJKyaTv++65/A/6L8r4E8SywTbqJmfp/ka/XXdbr3/Sv+d+xb06YC1euRPhiz6Fed3jZzxxX+Yz999Du3VA1n6SxYqEfc4qT9OTAhDhyRMHvfdrJuLtBnmuhKt0vqhmdFywtdbpOvOXOK5MbEDgbGG8jnBEeu/tsC86ttVaU0posGaWUlqWxcxwy49zW8g5L8sSY3y9XujDIoKusK5rrVVYM+I6zvSMaa3lnD00O990iV4Leq53X2eqmnOeksG3DZsGoufLCYGelPGdE2qKa0NT772qPI7XEgcfdAhE7vijlIKn0nThkDvmn9Cpz4yuqrVWvCamKvSXmuc+zi9Wr/6pZkgT6MSadCc6+hGxphSKyD+iMq/r9imv4QAAAABJRU5ErkJggg==", + "text/plain": [ + "" + ] + }, + "execution_count": 43, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "image = cv2.imread('/Users/alimustofa/Halotec/Source Code/research/ocr/from_scratch/images/C_13549.jpg')\n", + "image = Image.fromarray(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))\n", + "image = transforms_img(image)\n", + "image = transforms.ToPILImage()(image)\n", + "image" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3.9.13 64-bit", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.13" + }, + "orig_nbformat": 4, + "vscode": 
{ + "interpreter": { + "hash": "b0fa6594d8f4cbf19f97940f81e996739fb7646882a419484c72d19e05852a7e" + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}